summaryrefslogtreecommitdiffstats
path: root/src/pybind/mgr/dashboard/tests
diff options
context:
space:
mode:
Diffstat (limited to 'src/pybind/mgr/dashboard/tests')
-rw-r--r--src/pybind/mgr/dashboard/tests/__init__.py366
-rw-r--r--src/pybind/mgr/dashboard/tests/helper.py56
-rw-r--r--src/pybind/mgr/dashboard/tests/test_access_control.py871
-rw-r--r--src/pybind/mgr/dashboard/tests/test_api_auditing.py93
-rw-r--r--src/pybind/mgr/dashboard/tests/test_auth.py66
-rw-r--r--src/pybind/mgr/dashboard/tests/test_ceph_service.py170
-rw-r--r--src/pybind/mgr/dashboard/tests/test_cephfs.py42
-rw-r--r--src/pybind/mgr/dashboard/tests/test_controllers.py191
-rw-r--r--src/pybind/mgr/dashboard/tests/test_daemon.py41
-rw-r--r--src/pybind/mgr/dashboard/tests/test_docs.py125
-rw-r--r--src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py29
-rw-r--r--src/pybind/mgr/dashboard/tests/test_exceptions.py161
-rw-r--r--src/pybind/mgr/dashboard/tests/test_feature_toggles.py65
-rw-r--r--src/pybind/mgr/dashboard/tests/test_grafana.py133
-rw-r--r--src/pybind/mgr/dashboard/tests/test_home.py74
-rw-r--r--src/pybind/mgr/dashboard/tests/test_host.py509
-rw-r--r--src/pybind/mgr/dashboard/tests/test_iscsi.py1008
-rw-r--r--src/pybind/mgr/dashboard/tests/test_nfs.py240
-rw-r--r--src/pybind/mgr/dashboard/tests/test_notification.py137
-rw-r--r--src/pybind/mgr/dashboard/tests/test_orchestrator.py40
-rw-r--r--src/pybind/mgr/dashboard/tests/test_osd.py434
-rw-r--r--src/pybind/mgr/dashboard/tests/test_plugin_debug.py38
-rw-r--r--src/pybind/mgr/dashboard/tests/test_pool.py121
-rw-r--r--src/pybind/mgr/dashboard/tests/test_prometheus.py131
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py195
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rbd_service.py180
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rest_client.py110
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rest_tasks.py92
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rgw.py225
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rgw_client.py355
-rw-r--r--src/pybind/mgr/dashboard/tests/test_settings.py208
-rw-r--r--src/pybind/mgr/dashboard/tests/test_ssl.py28
-rw-r--r--src/pybind/mgr/dashboard/tests/test_sso.py153
-rw-r--r--src/pybind/mgr/dashboard/tests/test_task.py433
-rw-r--r--src/pybind/mgr/dashboard/tests/test_tools.py211
-rw-r--r--src/pybind/mgr/dashboard/tests/test_versioning.py79
36 files changed, 7410 insertions, 0 deletions
diff --git a/src/pybind/mgr/dashboard/tests/__init__.py b/src/pybind/mgr/dashboard/tests/__init__.py
new file mode 100644
index 000000000..af264b3b1
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/__init__.py
@@ -0,0 +1,366 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=too-many-arguments
+from __future__ import absolute_import
+
+import contextlib
+import json
+import logging
+import threading
+import time
+from typing import Any, Dict, List, Optional
+from unittest import mock
+from unittest.mock import Mock
+
+import cherrypy
+from cherrypy._cptools import HandlerWrapperTool
+from cherrypy.test import helper
+from mgr_module import HandleCommandResult
+from orchestrator import HostSpec, InventoryHost
+from pyfakefs import fake_filesystem
+
+from .. import mgr
+from ..controllers import generate_controller_routes, json_error_page
+from ..controllers._version import APIVersion
+from ..module import Module
+from ..plugins import PLUGIN_MANAGER, debug, feature_toggles # noqa
+from ..services.auth import AuthManagerTool
+from ..services.exception import dashboard_exception_handler
+from ..tools import RequestLoggingTool
+
+PLUGIN_MANAGER.hook.init()
+PLUGIN_MANAGER.hook.register_commands()
+
+
+logger = logging.getLogger('tests')
+
+
class ModuleTestClass(Module):
    """Dashboard module subclass for testing the module methods."""

    def __init__(self) -> None:
        # Intentionally does NOT call Module.__init__: the real
        # initialization presumably needs a live ceph-mgr context,
        # which is unavailable in unit tests — TODO confirm.
        pass

    def _unconfigure_logging(self) -> None:
        # No-op so tests keep whatever logging setup they configured.
        pass
+
+
class CmdException(Exception):
    """Raised when a dashboard CLI command returns a negative retcode.

    The message passed to the constructor becomes ``str(self)``;
    the numeric return code is kept on ``self.retcode``.
    """

    def __init__(self, retcode, message):
        # Python 3 zero-argument super() — the file is Python 3 only
        # (it uses typing annotations elsewhere).
        super().__init__(message)
        self.retcode = retcode
+
+
class KVStoreMockMixin(object):
    """Mixin that simulates the mgr config/KV store with a plain dict."""

    # Backing storage shared by every mocked accessor below.
    CONFIG_KEY_DICT = {}

    @classmethod
    def mock_set_module_option(cls, attr, val):
        """Stand-in for ``mgr.set_module_option``: store *val* under *attr*."""
        cls.CONFIG_KEY_DICT[attr] = val

    @classmethod
    def mock_get_module_option(cls, attr, default=None):
        """Stand-in for ``mgr.get_module_option`` with the same default rule."""
        return cls.CONFIG_KEY_DICT.get(attr, default)

    @classmethod
    def mock_kv_store(cls):
        """Reset the fake store and wire it into the mocked ``mgr`` object."""
        cls.CONFIG_KEY_DICT.clear()
        # Kludge kept from the original implementation: the plain KV
        # store entry points are routed to the very same dict as the
        # module options.
        for setter in (mgr.set_module_option, mgr.set_store):
            setter.side_effect = cls.mock_set_module_option
        for getter in (mgr.get_module_option, mgr.get_store):
            getter.side_effect = cls.mock_get_module_option

    @classmethod
    def get_key(cls, key):
        """Return the stored value for *key*, or ``None`` when absent."""
        return cls.CONFIG_KEY_DICT.get(key)
+
+
# pylint: disable=protected-access
class CLICommandTestMixin(KVStoreMockMixin):
    """Mixin executing ``dashboard`` CLI commands against a test module."""

    _dashboard_module = ModuleTestClass()

    @classmethod
    def exec_cmd(cls, cmd, **kwargs):
        """Run ``dashboard <cmd>`` and return its output.

        JSON stdout is decoded; anything else is returned verbatim.
        Raises :class:`CmdException` on a negative return code.
        """
        inbuf = kwargs.get('inbuf')
        # kwargs deliberately wins over the generated prefix, matching
        # the handler's expectations; 'inbuf' stays in the dict as well.
        cmd_dict = dict({'prefix': 'dashboard {}'.format(cmd)}, **kwargs)

        result = HandleCommandResult(*cls._dashboard_module._handle_command(inbuf, cmd_dict))

        if result.retval < 0:
            raise CmdException(result.retval, result.stderr)
        try:
            return json.loads(result.stdout)
        except ValueError:
            # Not JSON — hand the raw text back to the caller.
            return result.stdout
+
+
class FakeFsMixin(object):
    """Mixin exposing a shared pyfakefs in-memory filesystem.

    Tests patch the real file API with the fake one, e.g. by mocking
    the target named in ``builtins_open`` with ``f_open``.
    """

    # One fake filesystem instance shared by all users of this mixin.
    fs = fake_filesystem.FakeFilesystem()
    # Drop-in replacements for open() and the os module, both backed by fs.
    f_open = fake_filesystem.FakeFileOpen(fs)
    f_os = fake_filesystem.FakeOsModule(fs)
    # Patch target string for replacing the built-in open().
    builtins_open = 'builtins.open'
+
+
class ControllerTestCase(helper.CPWebCase):
    """Base class for dashboard controller unit tests.

    Mounts one or more controller classes into an embedded CherryPy
    server (``setup_controllers``) and provides thin HTTP helpers
    (``_get``/``_post``/``_put``/``_delete``) plus polling helpers for
    asynchronous task endpoints that answer ``202 Accepted``.
    """

    # Endpoint lists cached per controller class; see the comment in
    # setup_controllers() for why this cache is required.
    _endpoints_cache = {}

    @classmethod
    def setup_controllers(cls, ctrl_classes, base_url='', cp_config: Optional[Dict[str, Any]] = None):
        """Mount the given controller class(es) under *base_url*.

        :param ctrl_classes: a controller class or a list of them.
        :param base_url: URL prefix the routes are generated under;
            an empty string mounts at the root.
        :param cp_config: extra per-controller CherryPy config merged on
            top of the test defaults (exception handler on, auth off).
        """
        if not isinstance(ctrl_classes, list):
            ctrl_classes = [ctrl_classes]
        mapper = cherrypy.dispatch.RoutesDispatcher()
        endpoint_list = []
        for ctrl in ctrl_classes:
            # Tests bypass authentication but keep the dashboard
            # exception handler so failures surface as JSON errors.
            ctrl._cp_config = {
                'tools.dashboard_exception_handler.on': True,
                'tools.authenticate.on': False
            }
            if cp_config:
                ctrl._cp_config.update(cp_config)
            inst = ctrl()

            # We need to cache the controller endpoints because
            # BaseController#endpoints method is not idempotent
            # and a controller might be needed by more than one
            # unit test.
            if ctrl not in cls._endpoints_cache:
                ctrl_endpoints = ctrl.endpoints()
                cls._endpoints_cache[ctrl] = ctrl_endpoints

            ctrl_endpoints = cls._endpoints_cache[ctrl]
            for endpoint in ctrl_endpoints:
                # Bind the freshly created instance to the cached endpoints.
                endpoint.inst = inst
                endpoint_list.append(endpoint)
        # Sort by URL so route generation order is deterministic.
        endpoint_list = sorted(endpoint_list, key=lambda e: e.url)
        for endpoint in endpoint_list:
            generate_controller_routes(endpoint, mapper, base_url)
        if base_url == '':
            base_url = '/'
        cherrypy.tree.mount(None, config={
            base_url: {'request.dispatch': mapper}})

    # Subclasses flip this on to exercise the request-logging tool.
    _request_logging = False

    @classmethod
    def setUpClass(cls):
        """Install the dashboard CherryPy tools and global test config."""
        super().setUpClass()
        cherrypy.tools.authenticate = AuthManagerTool()
        cherrypy.tools.dashboard_exception_handler = HandlerWrapperTool(dashboard_exception_handler,
                                                                        priority=31)
        cherrypy.config.update({
            'error_page.default': json_error_page,
            'tools.json_in.on': True,
            'tools.json_in.force': False
        })
        PLUGIN_MANAGER.hook.configure_cherrypy(config=cherrypy.config)

        if cls._request_logging:
            cherrypy.tools.request_logging = RequestLoggingTool()
            cherrypy.config.update({'tools.request_logging.on': True})

    @classmethod
    def tearDownClass(cls):
        # Only undo what setUpClass conditionally enabled.
        if cls._request_logging:
            cherrypy.config.update({'tools.request_logging.on': False})

    def _request(self, url, method, data=None, headers=None, version=APIVersion.DEFAULT):
        """Issue an HTTP request through CPWebCase.getPage.

        *data* (if any) is JSON-encoded; *version* controls the Accept
        header; an explicit *headers* list overrides everything built here.
        """
        if not data:
            b = None
            if version:
                h = [('Accept', version.to_mime_type()),
                     ('Content-Length', '0')]
            else:
                h = None
        else:
            b = json.dumps(data)
            if version is not None:
                h = [('Accept', version.to_mime_type()),
                     ('Content-Type', 'application/json'),
                     ('Content-Length', str(len(b)))]

            else:
                h = [('Content-Type', 'application/json'),
                     ('Content-Length', str(len(b)))]

        if headers:
            h = headers
        self.getPage(url, method=method, body=b, headers=h)

    def _get(self, url, headers=None, version=APIVersion.DEFAULT):
        self._request(url, 'GET', headers=headers, version=version)

    def _post(self, url, data=None, version=APIVersion.DEFAULT):
        self._request(url, 'POST', data, version=version)

    def _delete(self, url, data=None, version=APIVersion.DEFAULT):
        self._request(url, 'DELETE', data, version=version)

    def _put(self, url, data=None, version=APIVersion.DEFAULT):
        self._request(url, 'PUT', data, version=version)

    def _task_request(self, method, url, data, timeout, version=APIVersion.DEFAULT):
        """Issue a request and, when it returns ``202 Accepted``, poll the
        task endpoint until the task finishes or *timeout* seconds pass.

        On success the response body/status are rewritten to look like a
        synchronous completion of the original request.

        :raises Exception: when the task does not finish within *timeout*.
        """
        self._request(url, method, data, version=version)
        if self.status != '202 Accepted':
            logger.info("task finished immediately")
            return

        res = self.json_body()
        self.assertIsInstance(res, dict)
        self.assertIn('name', res)
        self.assertIn('metadata', res)

        task_name = res['name']
        task_metadata = res['metadata']

        # pylint: disable=protected-access
        class Waiter(threading.Thread):
            """Background thread polling /api/task once per second."""

            def __init__(self, task_name, task_metadata, tc):
                super(Waiter, self).__init__()
                self.task_name = task_name
                self.task_metadata = task_metadata
                self.ev = threading.Event()
                self.abort = False
                self.res_task = None
                self.tc = tc

            def run(self):
                running = True
                while running and not self.abort:
                    logger.info("task (%s, %s) is still executing", self.task_name,
                                self.task_metadata)
                    time.sleep(1)
                    self.tc._get('/api/task?name={}'.format(self.task_name), version=version)
                    res = self.tc.json_body()
                    for task in res['finished_tasks']:
                        # Tasks are matched by metadata, not by name.
                        if task['metadata'] == self.task_metadata:
                            # task finished
                            running = False
                            self.res_task = task
                            self.ev.set()

        thread = Waiter(task_name, task_metadata, self)
        thread.start()
        status = thread.ev.wait(timeout)
        if not status:
            # timeout expired
            thread.abort = True
            thread.join()
            raise Exception("Waiting for task ({}, {}) to finish timed out"
                            .format(task_name, task_metadata))
        logger.info("task (%s, %s) finished", task_name, task_metadata)
        if thread.res_task['success']:
            self.body = json.dumps(thread.res_task['ret_value'])
            # Map the HTTP method to the status a synchronous call would
            # have produced.
            if method == 'POST':
                self.status = '201 Created'
            elif method == 'PUT':
                self.status = '200 OK'
            elif method == 'DELETE':
                self.status = '204 No Content'
            return

        if 'status' in thread.res_task['exception']:
            self.status = thread.res_task['exception']['status']
        else:
            # NOTE(review): every other branch stores a status *string*
            # (e.g. '200 OK'); the bare int 500 here looks inconsistent
            # — confirm downstream assertions accept it.
            self.status = 500
        self.body = json.dumps(thread.res_task['exception'])

    def _task_post(self, url, data=None, timeout=60, version=APIVersion.DEFAULT):
        self._task_request('POST', url, data, timeout, version=version)

    def _task_delete(self, url, timeout=60, version=APIVersion.DEFAULT):
        self._task_request('DELETE', url, None, timeout, version=version)

    def _task_put(self, url, data=None, timeout=60, version=APIVersion.DEFAULT):
        self._task_request('PUT', url, data, timeout, version=version)

    def json_body(self):
        """Decode the last response body as JSON."""
        body_str = self.body.decode('utf-8') if isinstance(self.body, bytes) else self.body
        return json.loads(body_str)

    def assertJsonBody(self, data, msg=None):  # noqa: N802
        """Fail if value != self.body."""
        json_body = self.json_body()
        if data != json_body:
            if msg is None:
                msg = 'expected body:\n%r\n\nactual body:\n%r' % (
                    data, json_body)
            self._handlewebError(msg)

    def assertInJsonBody(self, data, msg=None):  # noqa: N802
        """Fail unless *data* is contained in the JSON-decoded body."""
        json_body = self.json_body()
        if data not in json_body:
            if msg is None:
                msg = 'expected %r to be in %r' % (data, json_body)
            self._handlewebError(msg)
+
+
class Stub:
    """Test class for returning predefined values"""

    @classmethod
    def get_mgr_no_services(cls):
        # Make mgr.get() report an empty cluster map (no services at all).
        mgr.get = Mock(return_value={})
+
+
class RgwStub(Stub):
    """Stub feeding canned RGW daemon data through the mocked ``mgr``."""

    @classmethod
    def get_daemons(cls):
        # Two daemons: one with an IPv4 beast frontend, one with an
        # IPv6 civetweb frontend, each in its own realm/zonegroup/zone.
        mgr.get = Mock(return_value={'services': {'rgw': {'daemons': {
            '5297': {
                'addr': '192.168.178.3:49774/1534999298',
                'metadata': {
                    'frontend_config#0': 'beast port=8000',
                    'id': 'daemon1',
                    'realm_name': 'realm1',
                    'zonegroup_name': 'zonegroup1',
                    'zone_name': 'zone1',
                    'hostname': 'daemon1.server.lan'
                }
            },
            '5398': {
                'addr': '[2001:db8:85a3::8a2e:370:7334]:49774/1534999298',
                'metadata': {
                    'frontend_config#0': 'civetweb port=8002',
                    'id': 'daemon2',
                    'realm_name': 'realm2',
                    'zonegroup_name': 'zonegroup2',
                    'zone_name': 'zone2',
                    'hostname': 'daemon2.server.lan'
                }
            }
        }}}})

    @classmethod
    def get_settings(cls):
        # Serve fake RGW credentials through mgr.get_module_option.
        settings = {
            'RGW_API_ACCESS_KEY': 'fake-access-key',
            'RGW_API_SECRET_KEY': 'fake-secret-key',
        }
        mgr.get_module_option = Mock(side_effect=settings.get)
+
+
@contextlib.contextmanager
def patch_orch(available: bool, missing_features: Optional[List[str]] = None,
               hosts: Optional[List[HostSpec]] = None,
               inventory: Optional[List[dict]] = None):
    """Context manager replacing ``OrchClient.instance`` with a fake client.

    :param available: value reported by ``fake_client.available()``.
    :param missing_features: value reported by ``get_missing_features()``.
    :param hosts: optional canned result for ``hosts.list()``.
    :param inventory: optional list of inventory dicts; ``inventory.list()``
        filters them by host name and converts each with
        ``InventoryHost.from_json``.
    :yields: the fake client, so tests can add further expectations.
    """
    with mock.patch('dashboard.controllers.orchestrator.OrchClient.instance') as instance:
        fake_client = mock.Mock()
        fake_client.available.return_value = available
        fake_client.get_missing_features.return_value = missing_features

        if hosts is not None:
            fake_client.hosts.list.return_value = hosts

        if inventory is not None:
            def _list_inventory(hosts=None, refresh=False):  # pylint: disable=unused-argument
                # Mimic the orchestrator API: optional host-name filter,
                # refresh flag ignored.
                inv_hosts = []
                for inv_host in inventory:
                    if hosts is None or inv_host['name'] in hosts:
                        inv_hosts.append(InventoryHost.from_json(inv_host))
                return inv_hosts
            fake_client.inventory.list.side_effect = _list_inventory

        instance.return_value = fake_client
        yield fake_client
diff --git a/src/pybind/mgr/dashboard/tests/helper.py b/src/pybind/mgr/dashboard/tests/helper.py
new file mode 100644
index 000000000..e71133a10
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/helper.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+try:
+ from typing import Any, Dict
+except ImportError:
+ pass
+
+
def update_dict(data, update_data):
    # type: (Dict[Any, Any], Dict[Any, Any]) -> Dict[Any, Any]
    """ Update a dictionary recursively.

    Eases doing so by providing the option to separate the key to be updated by dot characters. If
    a key provided does not exist, it will raise a KeyError instead of just updating the
    dictionary.

    Limitations

    Please note that the functionality provided by this method can only be used if the dictionary to
    be updated (`data`) does not contain dot characters in its keys.

    :raises KeyError:

    >>> update_dict({'foo': {'bar': 5}}, {'foo.bar': 10})
    {'foo': {'bar': 10}}

    >>> update_dict({'foo': {'bar': 5}}, {'xyz': 10})
    Traceback (most recent call last):
    ...
    KeyError: 'xyz'

    >>> update_dict({'foo': {'bar': 5}}, {'foo.xyz': 10})
    Traceback (most recent call last):
    ...
    KeyError: 'xyz'
    """
    for k, v in update_data.items():
        # Split the dotted path into the intermediate keys and the
        # final key that receives the new value.
        *path, last = k.split('.')
        element = data
        for key in path:
            # Walk down; a missing intermediate key raises KeyError(key).
            # Note: previous implementation used `if not element`, which
            # misbehaved for falsy intermediate values ({}, 0, '') and
            # for dotless top-level keys.
            element = element[key]
        if last not in element:
            raise KeyError(last)
        element[last] = v
    return data
diff --git a/src/pybind/mgr/dashboard/tests/test_access_control.py b/src/pybind/mgr/dashboard/tests/test_access_control.py
new file mode 100644
index 000000000..361f65e60
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_access_control.py
@@ -0,0 +1,871 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import errno
+import json
+import tempfile
+import time
+import unittest
+from datetime import datetime, timedelta
+
+from mgr_module import ERROR_MSG_EMPTY_INPUT_FILE
+
+from .. import mgr
+from ..security import Permission, Scope
+from ..services.access_control import SYSTEM_ROLES, AccessControlDB, \
+ PasswordPolicy, load_access_control_db, password_hash
+from ..settings import Settings
+from ..tests import CLICommandTestMixin, CmdException
+
+
+class AccessControlTest(unittest.TestCase, CLICommandTestMixin):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.mock_kv_store()
+ mgr.ACCESS_CONTROL_DB = None
+
+ def setUp(self):
+ self.CONFIG_KEY_DICT.clear()
+ load_access_control_db()
+
    def load_persistent_db(self):
        """Return the access-control DB as persisted in the mocked KV store.

        Asserts that the DB has been saved at least once (its config key
        must be present in ``CONFIG_KEY_DICT``).
        """
        config_key = AccessControlDB.accessdb_config_key()
        self.assertIn(config_key, self.CONFIG_KEY_DICT)
        db_json = self.CONFIG_KEY_DICT[config_key]
        db = json.loads(db_json)
        return db
+
+ # The DB is written to persistent storage the first time it is saved.
+ # However, should an operation fail due to <reasons>, we may end up in
+ # a state where we have a completely empty CONFIG_KEY_DICT (our mock
+ # equivalent to the persistent state). While this works for most of the
+ # tests in this class, that would prevent us from testing things like
+ # "run a command that is expected to fail, and then ensure nothing
+ # happened", because we'd be asserting in `load_persistent_db()` due to
+ # the map being empty.
+ #
+ # This function will therefore force state to be written to our mock
+ # persistent state. We could have added this extra step to
+ # `load_persistent_db()` directly, but that would conflict with the
+ # upgrade tests. This way, we can selectively enforce this requirement
+ # where we believe it to be necessary; generically speaking, this should
+ # not be needed unless we're testing very specific behaviors.
+ #
+ def setup_and_load_persistent_db(self):
+ mgr.ACCESS_CTRL_DB.save()
+ self.load_persistent_db()
+
+ def validate_persistent_role(self, rolename, scopes_permissions,
+ description=None):
+ db = self.load_persistent_db()
+ self.assertIn('roles', db)
+ self.assertIn(rolename, db['roles'])
+ self.assertEqual(db['roles'][rolename]['name'], rolename)
+ self.assertEqual(db['roles'][rolename]['description'], description)
+ self.assertDictEqual(db['roles'][rolename]['scopes_permissions'],
+ scopes_permissions)
+
+ def validate_persistent_no_role(self, rolename):
+ db = self.load_persistent_db()
+ self.assertIn('roles', db)
+ self.assertNotIn(rolename, db['roles'])
+
    def validate_persistent_user(self, username, roles, password=None,
                                 name=None, email=None, last_update=None,
                                 enabled=True, pwdExpirationDate=None):
        """Assert that the persisted DB contains *username* with the given data.

        ``roles``, ``username`` and ``enabled`` are always checked; the
        remaining fields are only compared when a (truthy) value is passed.
        """
        db = self.load_persistent_db()
        self.assertIn('users', db)
        self.assertIn(username, db['users'])
        self.assertEqual(db['users'][username]['username'], username)
        self.assertListEqual(db['users'][username]['roles'], roles)
        if password:
            self.assertEqual(db['users'][username]['password'], password)
        if name:
            self.assertEqual(db['users'][username]['name'], name)
        if email:
            self.assertEqual(db['users'][username]['email'], email)
        if last_update:
            self.assertEqual(db['users'][username]['lastUpdate'], last_update)
        if pwdExpirationDate:
            self.assertEqual(db['users'][username]['pwdExpirationDate'], pwdExpirationDate)
        self.assertEqual(db['users'][username]['enabled'], enabled)
+
+ def validate_persistent_no_user(self, username):
+ db = self.load_persistent_db()
+ self.assertIn('users', db)
+ self.assertNotIn(username, db['users'])
+
+ def test_create_role(self):
+ role = self.exec_cmd('ac-role-create', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role', 'description': None,
+ 'scopes_permissions': {}})
+ self.validate_persistent_role('test_role', {})
+
+ def test_create_role_with_desc(self):
+ role = self.exec_cmd('ac-role-create', rolename='test_role',
+ description='Test Role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': 'Test Role',
+ 'scopes_permissions': {}})
+ self.validate_persistent_role('test_role', {}, 'Test Role')
+
+ def test_create_duplicate_role(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-create', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EEXIST)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' already exists")
+
+ def test_delete_role(self):
+ self.test_create_role()
+ out = self.exec_cmd('ac-role-delete', rolename='test_role')
+ self.assertEqual(out, "Role 'test_role' deleted")
+ self.validate_persistent_no_role('test_role')
+
+ def test_delete_nonexistent_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-delete', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_show_single_role(self):
+ self.test_create_role()
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role', 'description': None,
+ 'scopes_permissions': {}})
+
+ def test_show_nonexistent_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-show', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_show_system_roles(self):
+ roles = self.exec_cmd('ac-role-show')
+ self.assertEqual(len(roles), len(SYSTEM_ROLES))
+ for role in roles:
+ self.assertIn(role, SYSTEM_ROLES)
+
+ def test_show_system_role(self):
+ role = self.exec_cmd('ac-role-show', rolename="read-only")
+ self.assertEqual(role['name'], 'read-only')
+ self.assertEqual(
+ role['description'],
+ 'allows read permission for all security scope except dashboard settings and config-opt'
+ )
+
+ def test_delete_system_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-delete', rolename='administrator')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot delete system role 'administrator'")
+
+ def test_add_role_scope_perms(self):
+ self.test_create_role()
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename=Scope.POOL,
+ permissions=[Permission.READ, Permission.DELETE])
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': None,
+ 'scopes_permissions': {
+ Scope.POOL: [Permission.DELETE,
+ Permission.READ]
+ }})
+ self.validate_persistent_role('test_role', {
+ Scope.POOL: [Permission.DELETE, Permission.READ]
+ })
+
+ def test_del_role_scope_perms(self):
+ self.test_add_role_scope_perms()
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename=Scope.MONITOR,
+ permissions=[Permission.READ, Permission.CREATE])
+ self.validate_persistent_role('test_role', {
+ Scope.POOL: [Permission.DELETE, Permission.READ],
+ Scope.MONITOR: [Permission.CREATE, Permission.READ]
+ })
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename=Scope.POOL)
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': None,
+ 'scopes_permissions': {
+ Scope.MONITOR: [Permission.CREATE,
+ Permission.READ]
+ }})
+ self.validate_persistent_role('test_role', {
+ Scope.MONITOR: [Permission.CREATE, Permission.READ]
+ })
+
+ def test_add_role_scope_perms_nonexistent_role(self):
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='pool',
+ permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_add_role_invalid_scope_perms(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='invalidscope',
+ permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+ self.assertEqual(str(ctx.exception),
+ "Scope 'invalidscope' is not valid\n Possible values: "
+ "{}".format(Scope.all_scopes()))
+
+ def test_add_role_scope_invalid_perms(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='pool', permissions=['invalidperm'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+ self.assertEqual(str(ctx.exception),
+ "Permission 'invalidperm' is not valid\n Possible "
+ "values: {}".format(Permission.all_permissions()))
+
+ def test_del_role_scope_perms_nonexistent_role(self):
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename='pool')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_del_role_nonexistent_scope_perms(self):
+ self.test_add_role_scope_perms()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename='nonexistentscope')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "There are no permissions for scope 'nonexistentscope' "
+ "in role 'test_role'")
+
+ def test_not_permitted_add_role_scope_perms(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='read-only',
+ scopename='pool', permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot update system role 'read-only'")
+
+ def test_not_permitted_del_role_scope_perms(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='read-only',
+ scopename='pool')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot update system role 'read-only'")
+
+ def test_create_user(self, username='admin', rolename=None, enabled=True,
+ pwdExpirationDate=None):
+ user = self.exec_cmd('ac-user-create', username=username,
+ rolename=rolename, inbuf='admin',
+ name='{} User'.format(username),
+ email='{}@user.com'.format(username),
+ enabled=enabled, force_password=True,
+ pwd_expiration_date=pwdExpirationDate)
+
+ pass_hash = password_hash('admin', user['password'])
+ self.assertDictEqual(user, {
+ 'username': username,
+ 'password': pass_hash,
+ 'pwdExpirationDate': pwdExpirationDate,
+ 'pwdUpdateRequired': False,
+ 'lastUpdate': user['lastUpdate'],
+ 'name': '{} User'.format(username),
+ 'email': '{}@user.com'.format(username),
+ 'roles': [rolename] if rolename else [],
+ 'enabled': enabled
+ })
+ self.validate_persistent_user(username, [rolename] if rolename else [],
+ pass_hash, '{} User'.format(username),
+ '{}@user.com'.format(username),
+ user['lastUpdate'], enabled)
+ return user
+
+ def test_create_disabled_user(self):
+ self.test_create_user(enabled=False)
+
+ def test_create_user_pwd_expiration_date(self):
+ expiration_date = datetime.utcnow() + timedelta(days=10)
+ expiration_date = int(time.mktime(expiration_date.timetuple()))
+ self.test_create_user(pwdExpirationDate=expiration_date)
+
+ def test_create_user_with_role(self):
+ self.test_add_role_scope_perms()
+ self.test_create_user(rolename='test_role')
+
+ def test_create_user_with_system_role(self):
+ self.test_create_user(rolename='administrator')
+
+ def test_delete_user(self):
+ self.test_create_user()
+ out = self.exec_cmd('ac-user-delete', username='admin')
+ self.assertEqual(out, "User 'admin' deleted")
+ users = self.exec_cmd('ac-user-show')
+ self.assertEqual(len(users), 0)
+ self.validate_persistent_no_user('admin')
+
+ def test_create_duplicate_user(self):
+ self.test_create_user()
+ ret = self.exec_cmd('ac-user-create', username='admin', inbuf='admin',
+ force_password=True)
+ self.assertEqual(ret, "User 'admin' already exists")
+
+ def test_create_users_with_dne_role(self):
+ # one time call to setup our persistent db
+ self.setup_and_load_persistent_db()
+
+ # create a user with a role that does not exist; expect a failure
+ try:
+ self.exec_cmd('ac-user-create', username='foo',
+ rolename='dne_role', inbuf='foopass',
+ name='foo User', email='foo@user.com',
+ force_password=True)
+ except CmdException as e:
+ self.assertEqual(e.retcode, -errno.ENOENT)
+
+ db = self.load_persistent_db()
+ if 'users' in db:
+ self.assertNotIn('foo', db['users'])
+
+ # We could just finish our test here, given we ensured that the user
+ # with a non-existent role is not in persistent storage. However,
+ # we're going to test the database's consistency, making sure that
+ # side-effects are not written to persistent storage once we commit
+ # an unrelated operation. To ensure this, we'll issue another
+ # operation that is sharing the same code path, and will check whether
+ # the next operation commits dirty state.
+
+ # create a role (this will be 'test_role')
+ self.test_create_role()
+ self.exec_cmd('ac-user-create', username='bar',
+ rolename='test_role', inbuf='barpass',
+ name='bar User', email='bar@user.com',
+ force_password=True)
+
+ # validate db:
+ # user 'foo' should not exist
+ # user 'bar' should exist and have role 'test_role'
+ self.validate_persistent_user('bar', ['test_role'])
+
+ db = self.load_persistent_db()
+ self.assertIn('users', db)
+ self.assertNotIn('foo', db['users'])
+
+ def test_delete_nonexistent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-delete', username='admin')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_add_user_roles(self, username='admin',
+ roles=['pool-manager', 'block-manager']):
+ user_orig = self.test_create_user(username)
+ uroles = []
+ for role in roles:
+ uroles.append(role)
+ uroles.sort()
+ user = self.exec_cmd('ac-user-add-roles', username=username,
+ roles=[role])
+ self.assertLessEqual(uroles, user['roles'])
+ self.validate_persistent_user(username, uroles)
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+ def test_add_user_roles2(self):
+ user_orig = self.test_create_user()
+ user = self.exec_cmd('ac-user-add-roles', username="admin",
+ roles=['pool-manager', 'block-manager'])
+ self.assertLessEqual(['block-manager', 'pool-manager'],
+ user['roles'])
+ self.validate_persistent_user('admin', ['block-manager',
+ 'pool-manager'])
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+    # --- ac-user-add-roles / ac-user-set-roles ------------------------------
+    # Error cases must fail with ENOENT for an unknown user or unknown role;
+    # the happy path verifies persistence and lastUpdate monotonicity.
+    def test_add_user_roles_not_existent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-add-roles', username="admin",
+                          roles=['pool-manager', 'block-manager'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_add_user_roles_not_existent_role(self):
+        self.test_create_user()
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-add-roles', username="admin",
+                          roles=['Invalid Role'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception),
+                         "Role 'Invalid Role' does not exist")
+
+    def test_set_user_roles(self):
+        user_orig = self.test_create_user()
+        user = self.exec_cmd('ac-user-add-roles', username="admin",
+                             roles=['pool-manager'])
+        self.assertLessEqual(['pool-manager'], user['roles'])
+        self.validate_persistent_user('admin', ['pool-manager'])
+        self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+        # set-roles replaces the previous role list rather than extending it.
+        user2 = self.exec_cmd('ac-user-set-roles', username="admin",
+                              roles=['rgw-manager', 'block-manager'])
+        self.assertLessEqual(['block-manager', 'rgw-manager'],
+                             user2['roles'])
+        self.validate_persistent_user('admin', ['block-manager',
+                                                'rgw-manager'])
+        self.assertGreaterEqual(user2['lastUpdate'], user['lastUpdate'])
+
+    def test_set_user_roles_not_existent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-roles', username="admin",
+                          roles=['pool-manager', 'block-manager'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_set_user_roles_not_existent_role(self):
+        self.test_create_user()
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-roles', username="admin",
+                          roles=['Invalid Role'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception),
+                         "Role 'Invalid Role' does not exist")
+
+    # --- ac-user-del-roles / ac-user-show / ac-role-delete ------------------
+    def test_del_user_roles(self):
+        self.test_add_user_roles()
+        user = self.exec_cmd('ac-user-del-roles', username="admin",
+                             roles=['pool-manager'])
+        self.assertLessEqual(['block-manager'], user['roles'])
+        self.validate_persistent_user('admin', ['block-manager'])
+
+    def test_del_user_roles_not_existent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-del-roles', username="admin",
+                          roles=['pool-manager', 'block-manager'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_del_user_roles_not_existent_role(self):
+        self.test_create_user()
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-del-roles', username="admin",
+                          roles=['Invalid Role'])
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception),
+                         "Role 'Invalid Role' does not exist")
+
+    def test_del_user_roles_not_associated_role(self):
+        self.test_create_user()
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-del-roles', username="admin",
+                          roles=['rgw-manager'])
+
+        # Deleting a role the user does not have is ENOENT, not a no-op.
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception),
+                         "Role 'rgw-manager' is not associated with user "
+                         "'admin'")
+
+    def test_show_user(self):
+        self.test_add_user_roles()
+        user = self.exec_cmd('ac-user-show', username='admin')
+        # Re-hash 'admin' with the stored hash as salt; equality below proves
+        # the stored password is the hash of 'admin'.
+        pass_hash = password_hash('admin', user['password'])
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'lastUpdate': user['lastUpdate'],
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'roles': ['block-manager', 'pool-manager'],
+            'enabled': True
+        })
+
+    def test_show_nonexistent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-show', username='admin')
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_show_all_users(self):
+        self.test_add_user_roles('admin', ['administrator'])
+        self.test_add_user_roles('guest', ['read-only'])
+        users = self.exec_cmd('ac-user-show')
+        self.assertEqual(len(users), 2)
+        for user in users:
+            self.assertIn(user, ['admin', 'guest'])
+
+    def test_del_role_associated_with_user(self):
+        self.test_create_role()
+        self.test_add_user_roles('guest', ['test_role'])
+
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-role-delete', rolename='test_role')
+
+        # A role still referenced by any user must not be deletable (EPERM).
+        self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+        self.assertEqual(str(ctx.exception),
+                         "Role 'test_role' is still associated with user "
+                         "'guest'")
+
+    # --- ac-user-set-info / ac-user-set-password ----------------------------
+    def test_set_user_info(self):
+        user_orig = self.test_create_user()
+        user = self.exec_cmd('ac-user-set-info', username='admin',
+                             name='Admin Name', email='admin@admin.com')
+        pass_hash = password_hash('admin', user['password'])
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'Admin Name',
+            'email': 'admin@admin.com',
+            'lastUpdate': user['lastUpdate'],
+            'roles': [],
+            'enabled': True
+        })
+        self.validate_persistent_user('admin', [], pass_hash, 'Admin Name',
+                                      'admin@admin.com')
+        # Changing only name/email must not bump lastUpdate (strict equality).
+        self.assertEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+    def test_set_user_info_nonexistent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-info', username='admin',
+                          name='Admin Name', email='admin@admin.com')
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_set_user_password(self):
+        user_orig = self.test_create_user()
+        user = self.exec_cmd('ac-user-set-password', username='admin',
+                             inbuf='newpass', force_password=True)
+        pass_hash = password_hash('newpass', user['password'])
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'lastUpdate': user['lastUpdate'],
+            'roles': [],
+            'enabled': True
+        })
+        self.validate_persistent_user('admin', [], pass_hash, 'admin User',
+                                      'admin@user.com')
+        self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+    def test_sanitize_password(self):
+        self.test_create_user()
+        # Literal backslash-escape sequences in the password must survive;
+        # only the real trailing newline characters appended below may be
+        # stripped from the input buffer.
+        password = 'myPass\\n\\r\\n'
+        with tempfile.TemporaryFile(mode='w+') as pwd_file:
+            # Add new line separators (like some text editors when a file is saved).
+            pwd_file.write('{}{}'.format(password, '\n\r\n\n'))
+            pwd_file.seek(0)
+            user = self.exec_cmd('ac-user-set-password', username='admin',
+                                 inbuf=pwd_file.read(), force_password=True)
+            pass_hash = password_hash(password, user['password'])
+            self.assertEqual(user['password'], pass_hash)
+
+    def test_set_user_password_nonexistent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-password', username='admin',
+                          inbuf='newpass', force_password=True)
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_set_user_password_empty(self):
+        # A whitespace-only inbuf counts as an empty input file (EINVAL).
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-password', username='admin', inbuf='\n',
+                          force_password=True)
+
+        self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+        self.assertIn(ERROR_MSG_EMPTY_INPUT_FILE, str(ctx.exception))
+
+    # --- ac-user-set-password-hash / set-login-credentials ------------------
+    def test_set_user_password_hash(self):
+        user_orig = self.test_create_user()
+        # Pre-computed bcrypt hash; the command stores the given hash as-is,
+        # and re-hashing 'newpass' with it as salt must reproduce it.
+        user = self.exec_cmd('ac-user-set-password-hash', username='admin',
+                             inbuf='$2b$12$Pt3Vq/rDt2y9glTPSV.VFegiLkQeIpddtkhoFetNApYmIJOY8gau2')
+        pass_hash = password_hash('newpass', user['password'])
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'lastUpdate': user['lastUpdate'],
+            'roles': [],
+            'enabled': True
+        })
+        self.validate_persistent_user('admin', [], pass_hash, 'admin User',
+                                      'admin@user.com')
+        self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+    def test_set_user_password_hash_nonexistent_user(self):
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-password-hash', username='admin',
+                          inbuf='$2b$12$Pt3Vq/rDt2y9glTPSV.VFegiLkQeIpddtkhoFetNApYmIJOY8gau2')
+
+        self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+        self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+    def test_set_user_password_hash_broken_hash(self):
+        self.test_create_user()
+        with self.assertRaises(CmdException) as ctx:
+            self.exec_cmd('ac-user-set-password-hash', username='admin',
+                          inbuf='1')
+
+        self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+        self.assertEqual(str(ctx.exception), 'Invalid password hash')
+
+    def test_set_login_credentials(self):
+        # No prior user exists: the command creates one and grants it the
+        # 'administrator' role, with no name/email set.
+        self.exec_cmd('set-login-credentials', username='admin',
+                      inbuf='admin')
+        user = self.exec_cmd('ac-user-show', username='admin')
+        pass_hash = password_hash('admin', user['password'])
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': None,
+            'email': None,
+            'lastUpdate': user['lastUpdate'],
+            'roles': ['administrator'],
+            'enabled': True,
+        })
+        self.validate_persistent_user('admin', ['administrator'], pass_hash,
+                                      None, None)
+
+    def test_set_login_credentials_for_existing_user(self):
+        self.test_add_user_roles('admin', ['read-only'])
+        self.exec_cmd('set-login-credentials', username='admin',
+                      inbuf='admin2')
+        user = self.exec_cmd('ac-user-show', username='admin')
+        pass_hash = password_hash('admin2', user['password'])
+        # For an existing user only the password changes; name, email and
+        # roles are preserved.
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'password': pass_hash,
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'lastUpdate': user['lastUpdate'],
+            'roles': ['read-only'],
+            'enabled': True
+        })
+        self.validate_persistent_user('admin', ['read-only'], pass_hash,
+                                      'admin User', 'admin@user.com')
+
+    # --- Serialized access-control DB loading -------------------------------
+    # Seed the mocked config-key store with a serialized DB dump and verify
+    # load_access_control_db() restores users and roles. v1 dumps lack
+    # pwdExpirationDate/pwdUpdateRequired/enabled; the expected user dicts
+    # show these are filled with defaults on load. v2 carries them explicitly.
+    # Note the doubled braces: the JSON templates go through str.format().
+    def test_load_v1(self):
+        self.CONFIG_KEY_DICT['accessdb_v1'] = '''
+            {{
+                "users": {{
+                    "admin": {{
+                        "username": "admin",
+                        "password":
+                            "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
+                        "roles": ["block-manager", "test_role"],
+                        "name": "admin User",
+                        "email": "admin@user.com",
+                        "lastUpdate": {}
+                    }}
+                }},
+                "roles": {{
+                    "test_role": {{
+                        "name": "test_role",
+                        "description": "Test Role",
+                        "scopes_permissions": {{
+                            "{}": ["{}", "{}"],
+                            "{}": ["{}"]
+                        }}
+                    }}
+                }},
+                "version": 1
+            }}
+        '''.format(int(round(time.time())), Scope.ISCSI, Permission.READ,
+                   Permission.UPDATE, Scope.POOL, Permission.CREATE)
+
+        load_access_control_db()
+        role = self.exec_cmd('ac-role-show', rolename="test_role")
+        self.assertDictEqual(role, {
+            'name': 'test_role',
+            'description': "Test Role",
+            'scopes_permissions': {
+                Scope.ISCSI: [Permission.READ, Permission.UPDATE],
+                Scope.POOL: [Permission.CREATE]
+            }
+        })
+        user = self.exec_cmd('ac-user-show', username="admin")
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'lastUpdate': user['lastUpdate'],
+            'password':
+                "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'roles': ['block-manager', 'test_role'],
+            'enabled': True
+        })
+
+    def test_load_v2(self):
+        self.CONFIG_KEY_DICT['accessdb_v2'] = '''
+            {{
+                "users": {{
+                    "admin": {{
+                        "username": "admin",
+                        "password":
+                            "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
+                        "pwdExpirationDate": null,
+                        "pwdUpdateRequired": false,
+                        "roles": ["block-manager", "test_role"],
+                        "name": "admin User",
+                        "email": "admin@user.com",
+                        "lastUpdate": {},
+                        "enabled": true
+                    }}
+                }},
+                "roles": {{
+                    "test_role": {{
+                        "name": "test_role",
+                        "description": "Test Role",
+                        "scopes_permissions": {{
+                            "{}": ["{}", "{}"],
+                            "{}": ["{}"]
+                        }}
+                    }}
+                }},
+                "version": 2
+            }}
+        '''.format(int(round(time.time())), Scope.ISCSI, Permission.READ,
+                   Permission.UPDATE, Scope.POOL, Permission.CREATE)
+
+        load_access_control_db()
+        role = self.exec_cmd('ac-role-show', rolename="test_role")
+        self.assertDictEqual(role, {
+            'name': 'test_role',
+            'description': "Test Role",
+            'scopes_permissions': {
+                Scope.ISCSI: [Permission.READ, Permission.UPDATE],
+                Scope.POOL: [Permission.CREATE]
+            }
+        })
+        user = self.exec_cmd('ac-user-show', username="admin")
+        self.assertDictEqual(user, {
+            'username': 'admin',
+            'lastUpdate': user['lastUpdate'],
+            'password':
+                "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
+            'pwdExpirationDate': None,
+            'pwdUpdateRequired': False,
+            'name': 'admin User',
+            'email': 'admin@user.com',
+            'roles': ['block-manager', 'test_role'],
+            'enabled': True
+        })
+
+    # --- PasswordPolicy checks ----------------------------------------------
+    # Each test enables one policy toggle in Settings and probes the
+    # corresponding PasswordPolicy check in isolation. The expected credit
+    # values (3/11/17/22/30) pin the complexity scoring of the implementation.
+    def test_password_policy_pw_length(self):
+        Settings.PWD_POLICY_CHECK_LENGTH_ENABLED = True
+        Settings.PWD_POLICY_MIN_LENGTH = 3
+        pw_policy = PasswordPolicy('foo')
+        self.assertTrue(pw_policy.check_password_length())
+
+    def test_password_policy_pw_length_fail(self):
+        Settings.PWD_POLICY_CHECK_LENGTH_ENABLED = True
+        # Relies on the default PWD_POLICY_MIN_LENGTH (not overridden here).
+        pw_policy = PasswordPolicy('bar')
+        self.assertFalse(pw_policy.check_password_length())
+
+    def test_password_policy_credits_too_weak(self):
+        Settings.PWD_POLICY_CHECK_COMPLEXITY_ENABLED = True
+        pw_policy = PasswordPolicy('foo')
+        pw_credits = pw_policy.check_password_complexity()
+        self.assertEqual(pw_credits, 3)
+
+    def test_password_policy_credits_weak(self):
+        Settings.PWD_POLICY_CHECK_COMPLEXITY_ENABLED = True
+        pw_policy = PasswordPolicy('mypassword1')
+        pw_credits = pw_policy.check_password_complexity()
+        self.assertEqual(pw_credits, 11)
+
+    def test_password_policy_credits_ok(self):
+        Settings.PWD_POLICY_CHECK_COMPLEXITY_ENABLED = True
+        pw_policy = PasswordPolicy('mypassword1!@')
+        pw_credits = pw_policy.check_password_complexity()
+        self.assertEqual(pw_credits, 17)
+
+    def test_password_policy_credits_strong(self):
+        Settings.PWD_POLICY_CHECK_COMPLEXITY_ENABLED = True
+        pw_policy = PasswordPolicy('testpassword0047!@')
+        pw_credits = pw_policy.check_password_complexity()
+        self.assertEqual(pw_credits, 22)
+
+    def test_password_policy_credits_very_strong(self):
+        Settings.PWD_POLICY_CHECK_COMPLEXITY_ENABLED = True
+        pw_policy = PasswordPolicy('testpassword#!$!@$')
+        pw_credits = pw_policy.check_password_complexity()
+        self.assertEqual(pw_credits, 30)
+
+    def test_password_policy_forbidden_words(self):
+        Settings.PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED = True
+        pw_policy = PasswordPolicy('!@$testdashboard#!$')
+        self.assertTrue(pw_policy.check_if_contains_forbidden_words())
+
+    def test_password_policy_forbidden_words_custom(self):
+        Settings.PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED = True
+        Settings.PWD_POLICY_EXCLUSION_LIST = 'foo,bar'
+        pw_policy = PasswordPolicy('foo123bar')
+        self.assertTrue(pw_policy.check_if_contains_forbidden_words())
+
+    def test_password_policy_sequential_chars(self):
+        Settings.PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED = True
+        pw_policy = PasswordPolicy('!@$test123#!$')
+        self.assertTrue(pw_policy.check_if_sequential_characters())
+
+    def test_password_policy_repetitive_chars(self):
+        Settings.PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED = True
+        pw_policy = PasswordPolicy('!@$testfooo#!$')
+        self.assertTrue(pw_policy.check_if_repetitive_characters())
+
+    def test_password_policy_contain_username(self):
+        Settings.PWD_POLICY_CHECK_USERNAME_ENABLED = True
+        pw_policy = PasswordPolicy('%admin135)', 'admin')
+        self.assertTrue(pw_policy.check_if_contains_username())
+
+    def test_password_policy_is_old_pwd(self):
+        Settings.PWD_POLICY_CHECK_OLDPWD_ENABLED = True
+        pw_policy = PasswordPolicy('foo', old_password='foo')
+        self.assertTrue(pw_policy.check_is_old_password())
diff --git a/src/pybind/mgr/dashboard/tests/test_api_auditing.py b/src/pybind/mgr/dashboard/tests/test_api_auditing.py
new file mode 100644
index 000000000..854d76468
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_api_auditing.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import json
+import re
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from .. import mgr
+from ..controllers import RESTController, Router
+from ..tests import ControllerTestCase, KVStoreMockMixin
+
+
+# pylint: disable=W0613
+# Minimal REST controller used as the audit target: handlers are no-ops,
+# since only the request metadata (path, method, params) matters here.
+@Router('/foo', secure=False)
+class FooResource(RESTController):
+    def create(self, password):
+        pass
+
+    def get(self, key):
+        pass
+
+    def delete(self, key):
+        pass
+
+    def set(self, key, password, secret_key=None):
+        pass
+
+
+# Verifies the REST API audit logging: which requests are logged to the
+# 'audit' cluster-log channel and how sensitive parameters are masked.
+class ApiAuditingTest(ControllerTestCase, KVStoreMockMixin):
+
+    _request_logging = True
+
+    @classmethod
+    def setup_server(cls):
+        cls.setup_controllers([FooResource])
+
+    def setUp(self):
+        self.mock_kv_store()
+        mgr.cluster_log = mock.Mock()
+        mgr.set_module_option('AUDIT_API_ENABLED', True)
+        mgr.set_module_option('AUDIT_API_LOG_PAYLOAD', True)
+
+    def _validate_cluster_log_msg(self, path, method, user, params):
+        # Parse the first audit line captured by the cluster_log mock and
+        # compare each field; password-like params must arrive pre-masked.
+        channel, _, msg = mgr.cluster_log.call_args_list[0][0]
+        self.assertEqual(channel, 'audit')
+        pattern = r'^\[DASHBOARD\] from=\'(.+)\' path=\'(.+)\' ' \
+                  'method=\'(.+)\' user=\'(.+)\' params=\'(.+)\'$'
+        m = re.match(pattern, msg)
+        self.assertEqual(m.group(2), path)
+        self.assertEqual(m.group(3), method)
+        self.assertEqual(m.group(4), user)
+        self.assertDictEqual(json.loads(m.group(5)), params)
+
+    def test_no_audit(self):
+        mgr.set_module_option('AUDIT_API_ENABLED', False)
+        self._delete('/foo/test1')
+        mgr.cluster_log.assert_not_called()
+
+    def test_no_payload(self):
+        mgr.set_module_option('AUDIT_API_LOG_PAYLOAD', False)
+        self._delete('/foo/test1')
+        _, _, msg = mgr.cluster_log.call_args_list[0][0]
+        self.assertNotIn('params=', msg)
+
+    def test_no_audit_get(self):
+        # GET requests produce no audit entry even with auditing enabled.
+        self._get('/foo/test1')
+        mgr.cluster_log.assert_not_called()
+
+    def test_audit_put(self):
+        self._put('/foo/test1', {'password': 'y', 'secret_key': 1234})
+        mgr.cluster_log.assert_called_once()
+        self._validate_cluster_log_msg('/foo/test1', 'PUT', 'None',
+                                       {'key': 'test1',
+                                        'password': '***',
+                                        'secret_key': '***'})
+
+    def test_audit_post(self):
+        with mock.patch('dashboard.services.auth.JwtManager.get_username',
+                        return_value='hugo'):
+            self._post('/foo?password=1234')
+            mgr.cluster_log.assert_called_once()
+            self._validate_cluster_log_msg('/foo', 'POST', 'hugo',
+                                           {'password': '***'})
+
+    def test_audit_delete(self):
+        self._delete('/foo/test1')
+        mgr.cluster_log.assert_called_once()
+        self._validate_cluster_log_msg('/foo/test1', 'DELETE',
+                                       'None', {'key': 'test1'})
diff --git a/src/pybind/mgr/dashboard/tests/test_auth.py b/src/pybind/mgr/dashboard/tests/test_auth.py
new file mode 100644
index 000000000..d9755de98
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_auth.py
@@ -0,0 +1,66 @@
+import unittest
+from unittest.mock import Mock, patch
+
+from .. import mgr
+from ..controllers.auth import Auth
+from ..services.auth import JwtManager
+from ..tests import ControllerTestCase
+
+# Configure the global mgr mock at import time: JwtManager reads its token
+# TTL and secret through these calls, and the access-control DB is stubbed.
+mgr.get_module_option.return_value = JwtManager.JWT_TOKEN_TTL
+mgr.get_store.return_value = 'jwt_secret'
+mgr.ACCESS_CTRL_DB = Mock()
+mgr.ACCESS_CTRL_DB.get_attempt.return_value = 1
+
+
+class JwtManagerTest(unittest.TestCase):
+
+    def test_generate_token_and_decode(self):
+        # Re-apply the mgr mock return values so this test does not depend
+        # on side effects of other tests mutating the shared mock.
+        mgr.get_module_option.return_value = JwtManager.JWT_TOKEN_TTL
+        mgr.get_store.return_value = 'jwt_secret'
+
+        token = JwtManager.gen_token('my-username')
+        self.assertIsInstance(token, str)
+        self.assertTrue(token)
+
+        # A round-trip decode must yield the issuer and the username claims.
+        decoded_token = JwtManager.decode_token(token)
+        self.assertIsInstance(decoded_token, dict)
+        self.assertEqual(decoded_token['iss'], 'ceph-dashboard')
+        self.assertEqual(decoded_token['username'], 'my-username')
+
+
+class AuthTest(ControllerTestCase):
+
+    @classmethod
+    def setup_server(cls):
+        cls.setup_controllers([Auth])
+
+    def test_request_not_authorized(self):
+        # Re-mount the controller with authentication enforced so the
+        # unauthenticated request is rejected with 401.
+        self.setup_controllers([Auth], cp_config={'tools.authenticate.on': True})
+        self._post('/api/auth/logout')
+        self.assertStatus(401)
+
+    @patch('dashboard.controllers.auth.JwtManager.gen_token', Mock(return_value='my-token'))
+    @patch('dashboard.controllers.auth.AuthManager.authenticate', Mock(return_value={
+        'permissions': {'rgw': ['read']},
+        'pwdExpirationDate': 1000000,
+        'pwdUpdateRequired': False
+    }))
+    def test_login(self):
+        self._post('/api/auth', {'username': 'my-user', 'password': 'my-pass'})
+        self.assertStatus(201)
+        self.assertJsonBody({
+            'token': 'my-token',
+            'username': 'my-user',
+            'permissions': {'rgw': ['read']},
+            'pwdExpirationDate': 1000000,
+            'sso': False,
+            'pwdUpdateRequired': False
+        })
+
+    @patch('dashboard.controllers.auth.JwtManager', Mock())
+    def test_logout(self):
+        self._post('/api/auth/logout')
+        self.assertStatus(200)
+        self.assertJsonBody({
+            'redirect_url': '#/login'
+        })
diff --git a/src/pybind/mgr/dashboard/tests/test_ceph_service.py b/src/pybind/mgr/dashboard/tests/test_ceph_service.py
new file mode 100644
index 000000000..440fc7121
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_ceph_service.py
@@ -0,0 +1,170 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import logging
+import unittest
+from contextlib import contextmanager
+from unittest import mock
+
+import pytest
+
+from ..services.ceph_service import CephService
+
+
+class CephServiceTest(unittest.TestCase):
+    # Two fixture pools; 'bad_pool' carries an optional attribute ('flaky')
+    # that is absent from 'good_pool'.
+    pools = [{
+        'pool_name': 'good_pool',
+        'pool': 1,
+    }, {
+        'pool_name': 'bad_pool',
+        'pool': 2,
+        'flaky': 'option_x'
+    }]
+
+    def setUp(self):
+        # Mock get_pool_list
+        self.list_patch = mock.patch('dashboard.services.ceph_service.CephService.get_pool_list')
+        self.list = self.list_patch.start()
+        self.list.return_value = self.pools
+        # Mock mgr.get
+        self.mgr_patch = mock.patch('dashboard.mgr.get')
+        self.mgr = self.mgr_patch.start()
+        # PG states keyed by stringified pool id.
+        self.mgr.return_value = {
+            'by_pool': {
+                '1': {'active+clean': 16},
+                '2': {'creating+incomplete': 16},
+            }
+        }
+        self.service = CephService()
+
+    def tearDown(self):
+        self.list_patch.stop()
+        self.mgr_patch.stop()
+
+    def test_get_pool_by_attribute_with_match(self):
+        self.assertEqual(self.service.get_pool_by_attribute('pool', 1), self.pools[0])
+        self.assertEqual(self.service.get_pool_by_attribute('pool_name', 'bad_pool'), self.pools[1])
+
+    def test_get_pool_by_attribute_without_a_match(self):
+        # Unknown value or unknown attribute name both yield None.
+        self.assertEqual(self.service.get_pool_by_attribute('pool', 3), None)
+        self.assertEqual(self.service.get_pool_by_attribute('not_there', 'sth'), None)
+
+    def test_get_pool_by_attribute_matching_a_not_always_set_attribute(self):
+        self.assertEqual(self.service.get_pool_by_attribute('flaky', 'option_x'), self.pools[1])
+
+    @mock.patch('dashboard.mgr.rados.pool_reverse_lookup', return_value='good_pool')
+    def test_get_pool_name_from_id_with_match(self, _mock):
+        self.assertEqual(self.service.get_pool_name_from_id(1), 'good_pool')
+
+    @mock.patch('dashboard.mgr.rados.pool_reverse_lookup', return_value=None)
+    def test_get_pool_name_from_id_without_match(self, _mock):
+        self.assertEqual(self.service.get_pool_name_from_id(3), None)
+
+    def test_get_pool_pg_status(self):
+        self.assertEqual(self.service.get_pool_pg_status('good_pool'), {'active+clean': 16})
+
+    def test_get_pg_status_without_match(self):
+        self.assertEqual(self.service.get_pool_pg_status('no-pool'), {})
+
+
+@contextmanager
+def mock_smart_data(data):
+    # Patch the SMART-data plumbing: each devid maps to its payload entry,
+    # and both host- and daemon-based device discovery return the same
+    # device list derived from `data`'s keys.
+    devices = [{'devid': devid} for devid in data]
+
+    def _get_smart_data(d):
+        return {d['devid']: data[d['devid']]}
+
+    with mock.patch.object(CephService, '_get_smart_data_by_device', side_effect=_get_smart_data), \
+            mock.patch.object(CephService, 'get_devices_by_host', return_value=devices), \
+            mock.patch.object(CephService, 'get_devices_by_daemon', return_value=devices):
+        yield
+
+
+@pytest.mark.parametrize(
+    "by,args,log",
+    [
+        ('host', ('osd0',), 'from host osd0'),
+        ('daemon', ('osd', '1'), 'with ID 1')
+    ]
+)
+def test_get_smart_data(caplog, by, args, log):
+    # pylint: disable=protected-access
+    # When devices are found, the per-device fetch is invoked and the merged
+    # payload is returned; with no devices, nothing is fetched, the result is
+    # empty, and a debug message (checked via caplog) is logged.
+    expected_data = {
+        'aaa': {'device': {'name': '/dev/sda'}},
+        'bbb': {'device': {'name': '/dev/sdb'}},
+    }
+    with mock_smart_data(expected_data):
+        smart_data = getattr(CephService, 'get_smart_data_by_{}'.format(by))(*args)
+        getattr(CephService, 'get_devices_by_{}'.format(by)).assert_called_with(*args)
+        CephService._get_smart_data_by_device.assert_called()
+        assert smart_data == expected_data
+
+    with caplog.at_level(logging.DEBUG):
+        with mock_smart_data([]):
+            smart_data = getattr(CephService, 'get_smart_data_by_{}'.format(by))(*args)
+            getattr(CephService, 'get_devices_by_{}'.format(by)).assert_called_with(*args)
+            CephService._get_smart_data_by_device.assert_not_called()
+            assert smart_data == {}
+            assert log in caplog.text
+
+
+@mock.patch.object(CephService, 'send_command')
+def test_get_smart_data_by_device(send_command):
+    # pylint: disable=protected-access
+    # Exercises daemon selection: SMART data is fetched via an 'up' OSD when
+    # one exists, falls back to a mon otherwise, and yields {} when only
+    # down/unsupported daemons are associated with the device.
+    device_id = 'Hitachi_HUA72201_JPW9K0N20D22SE'
+    osd_tree_payload = {'nodes':
+                        [
+                            {'name': 'osd.1', 'status': 'down'},
+                            {'name': 'osd.2', 'status': 'up'},
+                            {'name': 'osd.3', 'status': 'up'}
+                        ]}
+    health_metrics_payload = {device_id: {'ata_apm': {'enabled': False}}}
+    side_effect = [osd_tree_payload, health_metrics_payload]
+
+    # Daemons associated: 1 osd down, 2 osd up.
+    send_command.side_effect = side_effect
+    smart_data = CephService._get_smart_data_by_device(
+        {'devid': device_id, 'daemons': ['osd.1', 'osd.2', 'osd.3']})
+    assert smart_data == health_metrics_payload
+    send_command.assert_has_calls([mock.call('mon', 'osd tree'),
+                                   mock.call('osd', 'smart', '2', devid=device_id)])
+
+    # Daemons associated: 1 osd down.
+    send_command.reset_mock()
+    send_command.side_effect = [osd_tree_payload]
+    smart_data = CephService._get_smart_data_by_device({'devid': device_id, 'daemons': ['osd.1']})
+    assert smart_data == {}
+    send_command.assert_has_calls([mock.call('mon', 'osd tree')])
+
+    # Daemons associated: 1 osd down, 1 mon.
+    send_command.reset_mock()
+    send_command.side_effect = side_effect
+    smart_data = CephService._get_smart_data_by_device(
+        {'devid': device_id, 'daemons': ['osd.1', 'mon.1']})
+    assert smart_data == health_metrics_payload
+    send_command.assert_has_calls([mock.call('mon', 'osd tree'),
+                                   mock.call('mon', 'device query-daemon-health-metrics',
+                                             who='mon.1')])
+
+    # Daemons associated: 1 mon.
+    send_command.reset_mock()
+    send_command.side_effect = side_effect
+    smart_data = CephService._get_smart_data_by_device({'devid': device_id, 'daemons': ['mon.1']})
+    assert smart_data == health_metrics_payload
+    send_command.assert_has_calls([mock.call('mon', 'osd tree'),
+                                   mock.call('mon', 'device query-daemon-health-metrics',
+                                             who='mon.1')])
+
+    # Daemons associated: 1 other (non-osd, non-mon).
+    send_command.reset_mock()
+    send_command.side_effect = [osd_tree_payload]
+    smart_data = CephService._get_smart_data_by_device({'devid': device_id, 'daemons': ['rgw.1']})
+    assert smart_data == {}
+    send_command.assert_has_calls([mock.call('mon', 'osd tree')])
+
+    # Daemons associated: no daemons.
+    send_command.reset_mock()
+    smart_data = CephService._get_smart_data_by_device({'devid': device_id, 'daemons': []})
+    assert smart_data == {}
+    send_command.assert_has_calls([])
diff --git a/src/pybind/mgr/dashboard/tests/test_cephfs.py b/src/pybind/mgr/dashboard/tests/test_cephfs.py
new file mode 100644
index 000000000..ae4253543
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_cephfs.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+from collections import defaultdict
+
+try:
+    # Bug fix: `patch` is used below (class decorator on CephFsTest) but was
+    # only imported in the ImportError fallback; with the third-party `mock`
+    # package installed this raised NameError. Import both names here too.
+    from mock import patch, Mock
+except ImportError:
+    from unittest.mock import patch, Mock
+
+from ..controllers.cephfs import CephFS
+from ..tests import ControllerTestCase
+
+
+# Stand-in for an MDS metadata mapping: any .get(key, default) returns 'bar'.
+class MetaDataMock(object):
+    def get(self, _x, _y):
+        return 'bar'
+
+
+def get_metadata_mock(key, meta_key):
+    # Side effect for mgr.get_metadata: only ('mds', 'foo') yields metadata;
+    # ('mds', None) simulates an unknown daemon (returns None).
+    return {
+        'mds': {
+            None: None,  # Unknown key
+            'foo': MetaDataMock()
+        }[meta_key]
+    }[key]
+
+
+@patch('dashboard.mgr.get_metadata', Mock(side_effect=get_metadata_mock))
+class CephFsTest(ControllerTestCase):
+    cephFs = CephFS()
+
+    def test_append_of_mds_metadata_if_key_is_not_found(self):
+        # Unknown MDS id: nothing must be appended to the versions map.
+        mds_versions = defaultdict(list)
+        # pylint: disable=protected-access
+        self.cephFs._append_mds_metadata(mds_versions, None)
+        self.assertEqual(len(mds_versions), 0)
+
+    def test_append_of_mds_metadata_with_existing_metadata(self):
+        mds_versions = defaultdict(list)
+        # pylint: disable=protected-access
+        self.cephFs._append_mds_metadata(mds_versions, 'foo')
+        self.assertEqual(len(mds_versions), 1)
+        self.assertEqual(mds_versions['bar'], ['foo'])
diff --git a/src/pybind/mgr/dashboard/tests/test_controllers.py b/src/pybind/mgr/dashboard/tests/test_controllers.py
new file mode 100644
index 000000000..4b91b1103
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_controllers.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from ..controllers import APIRouter, BaseController, Endpoint, RESTController, Router
+from ..tests import ControllerTestCase
+
+
+@Router("/btest/{key}", base_url="/ui", secure=False)
+class BTest(BaseController):
+ @Endpoint()
+ def test1(self, key, opt=1):
+ return {'key': key, 'opt': opt}
+
+ @Endpoint()
+ def test2(self, key, skey, opt=1):
+ return {'key': key, 'skey': skey, 'opt': opt}
+
+ @Endpoint(path="/foo/{skey}/test-3")
+ def test3(self, key, skey, opt=1):
+ return {'key': key, 'skey': skey, 'opt': opt}
+
+ @Endpoint('POST', path="/foo/{skey}/test-3", query_params=['opt'])
+ def test4(self, key, skey, data, opt=1):
+ return {'key': key, 'skey': skey, 'data': data, 'opt': opt}
+
+ @Endpoint('PUT', path_params=['skey'], query_params=['opt'])
+ def test5(self, key, skey, data1, data2=None, opt=1):
+ return {'key': key, 'skey': skey, 'data1': data1, 'data2': data2,
+ 'opt': opt}
+
+ @Endpoint('GET', json_response=False)
+ def test6(self, key, opt=1):
+ return "My Formatted string key={} opt={}".format(key, opt)
+
+ @Endpoint()
+ def __call__(self, key, opt=1):
+ return {'key': key, 'opt': opt}
+
+
+@APIRouter("/rtest/{key}", secure=False)
+class RTest(RESTController):
+ RESOURCE_ID = 'skey/ekey'
+
+ def list(self, key, opt=1):
+ return {'key': key, 'opt': opt}
+
+ def create(self, key, data1, data2=None):
+ return {'key': key, 'data1': data1, 'data2': data2}
+
+ def get(self, key, skey, ekey, opt=1):
+ return {'key': key, 'skey': skey, 'ekey': ekey, 'opt': opt}
+
+ def set(self, key, skey, ekey, data):
+ return {'key': key, 'skey': skey, 'ekey': ekey, 'data': data}
+
+ def delete(self, key, skey, ekey, opt=1):
+ pass
+
+ def bulk_set(self, key, data1, data2=None):
+ return {'key': key, 'data1': data1, 'data2': data2}
+
+ def bulk_delete(self, key, opt=1):
+ pass
+
+ @RESTController.Collection('POST')
+ def cmethod(self, key, data):
+ return {'key': key, 'data': data}
+
+ @RESTController.Resource('GET')
+ def rmethod(self, key, skey, ekey, opt=1):
+ return {'key': key, 'skey': skey, 'ekey': ekey, 'opt': opt}
+
+
+@Router("/", secure=False)
+class Root(BaseController):
+ @Endpoint(json_response=False)
+ def __call__(self):
+ return "<html></html>"
+
+
+# End-to-end routing tests for BTest/RTest mounted under the '/test' prefix.
+# Note: path and query parameters arrive as strings ('100', '3'), while JSON
+# request bodies keep their native types (40, True, lists).
+class ControllersTest(ControllerTestCase):
+    @classmethod
+    def setup_server(cls):
+        cls.setup_controllers([BTest, RTest], "/test")
+
+    def test_1(self):
+        self._get('/test/ui/btest/{}/test1?opt=3'.format(100))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'opt': '3'})
+
+    def test_2(self):
+        self._get('/test/ui/btest/{}/test2/{}?opt=3'.format(100, 200))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'skey': '200', 'opt': '3'})
+
+    def test_3(self):
+        self._get('/test/ui/btest/{}/foo/{}/test-3?opt=3'.format(100, 200))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'skey': '200', 'opt': '3'})
+
+    def test_4(self):
+        self._post('/test/ui/btest/{}/foo/{}/test-3?opt=3'.format(100, 200),
+                   {'data': 30})
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'skey': '200', 'data': 30,
+                             'opt': '3'})
+
+    def test_5(self):
+        self._put('/test/ui/btest/{}/test5/{}?opt=3'.format(100, 200),
+                  {'data1': 40, 'data2': "hello"})
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'skey': '200', 'data1': 40,
+                             'data2': "hello", 'opt': '3'})
+
+    def test_6(self):
+        self._get('/test/ui/btest/{}/test6'.format(100))
+        self.assertStatus(200)
+        self.assertBody("My Formatted string key=100 opt=1")
+
+    def test_7(self):
+        self._get('/test/ui/btest/{}?opt=3'.format(100))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '100', 'opt': '3'})
+
+    def test_rest_list(self):
+        self._get('/test/api/rtest/{}?opt=2'.format(300))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'opt': '2'})
+
+    def test_rest_create(self):
+        self._post('/test/api/rtest/{}'.format(300),
+                   {'data1': 20, 'data2': True})
+        self.assertStatus(201)
+        self.assertJsonBody({'key': '300', 'data1': 20, 'data2': True})
+
+    def test_rest_get(self):
+        self._get('/test/api/rtest/{}/{}/{}?opt=3'.format(300, 1, 2))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'skey': '1', 'ekey': '2',
+                             'opt': '3'})
+
+    def test_rest_set(self):
+        self._put('/test/api/rtest/{}/{}/{}'.format(300, 1, 2),
+                  {'data': 40})
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'skey': '1', 'ekey': '2',
+                             'data': 40})
+
+    def test_rest_delete(self):
+        # delete() returns nothing, which maps to HTTP 204 No Content.
+        self._delete('/test/api/rtest/{}/{}/{}?opt=3'.format(300, 1, 2))
+        self.assertStatus(204)
+
+    def test_rest_bulk_set(self):
+        self._put('/test/api/rtest/{}'.format(300),
+                  {'data1': 20, 'data2': True})
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'data1': 20, 'data2': True})
+
+        self._put('/test/api/rtest/{}'.format(400),
+                  {'data1': 20, 'data2': ['one', 'two', 'three']})
+        self.assertStatus(200)
+        self.assertJsonBody({
+            'key': '400',
+            'data1': 20,
+            'data2': ['one', 'two', 'three'],
+        })
+
+    def test_rest_bulk_delete(self):
+        self._delete('/test/api/rtest/{}?opt=2'.format(300))
+        self.assertStatus(204)
+
+    def test_rest_collection(self):
+        self._post('/test/api/rtest/{}/cmethod'.format(300), {'data': 30})
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'data': 30})
+
+    def test_rest_resource(self):
+        self._get('/test/api/rtest/{}/{}/{}/rmethod?opt=4'.format(300, 2, 3))
+        self.assertStatus(200)
+        self.assertJsonBody({'key': '300', 'skey': '2', 'ekey': '3',
+                             'opt': '4'})
+
+
+class RootControllerTest(ControllerTestCase):
+    @classmethod
+    def setup_server(cls):
+        cls.setup_controllers([Root])
+
+    def test_index(self):
+        # The Root controller serves its raw HTML body at '/'.
+        self._get("/")
+        self.assertBody("<html></html>")
diff --git a/src/pybind/mgr/dashboard/tests/test_daemon.py b/src/pybind/mgr/dashboard/tests/test_daemon.py
new file mode 100644
index 000000000..2008c8630
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_daemon.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+from ..controllers._version import APIVersion
+from ..controllers.daemon import Daemon
+from ..tests import ControllerTestCase, patch_orch
+
+
+class DaemonTest(ControllerTestCase):
+
+ URL_DAEMON = '/api/daemon'
+
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([Daemon])
+
+ def test_daemon_action(self):
+ msg = "Scheduled to stop crash.b78cd1164a1b on host 'hostname'"
+
+ with patch_orch(True) as fake_client:
+ fake_client.daemons.action.return_value = msg
+ payload = {
+ 'action': 'restart',
+ 'container_image': None
+ }
+ self._put(f'{self.URL_DAEMON}/crash.b78cd1164a1b', payload, version=APIVersion(0, 1))
+ self.assertJsonBody(msg)
+ self.assertStatus(200)
+
+ def test_daemon_invalid_action(self):
+ payload = {
+ 'action': 'invalid',
+ 'container_image': None
+ }
+ with patch_orch(True):
+ self._put(f'{self.URL_DAEMON}/crash.b78cd1164a1b', payload, version=APIVersion(0, 1))
+ self.assertJsonBody({
+ 'detail': 'Daemon action "invalid" is either not valid or not supported.',
+ 'code': 'invalid_daemon_action',
+ 'component': None
+ })
+ self.assertStatus(400)
diff --git a/src/pybind/mgr/dashboard/tests/test_docs.py b/src/pybind/mgr/dashboard/tests/test_docs.py
new file mode 100644
index 000000000..5291edb3b
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_docs.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from ..api.doc import SchemaType
+from ..controllers import ENDPOINT_MAP, APIDoc, APIRouter, Endpoint, EndpointDoc, RESTController
+from ..controllers._version import APIVersion
+from ..controllers.docs import Docs
+from ..tests import ControllerTestCase
+
+
+# Dummy controller and endpoint that can be assigned with @EndpointDoc and @GroupDoc
+@APIDoc("Group description", group="FooGroup")
+@APIRouter("/doctest/", secure=False)
+class DecoratedController(RESTController):
+ RESOURCE_ID = 'doctest'
+
+ @EndpointDoc(
+ description="Endpoint description",
+ group="BarGroup",
+ parameters={
+ 'parameter': (int, "Description of parameter"),
+ },
+ responses={
+ 200: [{
+ 'my_prop': (str, '200 property desc.')
+ }],
+ 202: {
+ 'my_prop': (str, '202 property desc.')
+ },
+ },
+ )
+ @Endpoint(json_response=False)
+ @RESTController.Resource('PUT', version=APIVersion(0, 1))
+ def decorated_func(self, parameter):
+ pass
+
+ @RESTController.MethodMap(version=APIVersion(0, 1))
+ def list(self):
+ pass
+
+
+# To assure functionality of @EndpointDoc, @GroupDoc
+class DocDecoratorsTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([DecoratedController, Docs], "/test")
+
+ def test_group_info_attr(self):
+ test_ctrl = DecoratedController()
+ self.assertTrue(hasattr(test_ctrl, 'doc_info'))
+ self.assertIn('tag_descr', test_ctrl.doc_info)
+ self.assertIn('tag', test_ctrl.doc_info)
+
+ def test_endpoint_info_attr(self):
+ test_ctrl = DecoratedController()
+ test_endpoint = test_ctrl.decorated_func
+ self.assertTrue(hasattr(test_endpoint, 'doc_info'))
+ self.assertIn('summary', test_endpoint.doc_info)
+ self.assertIn('tag', test_endpoint.doc_info)
+ self.assertIn('parameters', test_endpoint.doc_info)
+ self.assertIn('response', test_endpoint.doc_info)
+
+
+# To assure functionality of Docs.py
+# pylint: disable=protected-access
+class DocsTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ ENDPOINT_MAP.clear()
+ cls.setup_controllers([DecoratedController, Docs], "/test")
+
+ def test_type_to_str(self):
+ self.assertEqual(Docs()._type_to_str(str), str(SchemaType.STRING))
+ self.assertEqual(Docs()._type_to_str(int), str(SchemaType.INTEGER))
+ self.assertEqual(Docs()._type_to_str(bool), str(SchemaType.BOOLEAN))
+ self.assertEqual(Docs()._type_to_str(list), str(SchemaType.ARRAY))
+ self.assertEqual(Docs()._type_to_str(tuple), str(SchemaType.ARRAY))
+ self.assertEqual(Docs()._type_to_str(float), str(SchemaType.NUMBER))
+ self.assertEqual(Docs()._type_to_str(object), str(SchemaType.OBJECT))
+ self.assertEqual(Docs()._type_to_str(None), str(SchemaType.OBJECT))
+
+ def test_gen_paths(self):
+ outcome = Docs().gen_paths(False)['/api/doctest//{doctest}/decorated_func']['put']
+ self.assertIn('tags', outcome)
+ self.assertIn('summary', outcome)
+ self.assertIn('parameters', outcome)
+ self.assertIn('responses', outcome)
+
+ expected_response_content = {
+ '200': {
+ APIVersion(0, 1).to_mime_type(): {
+ 'schema': {'type': 'array',
+ 'items': {'type': 'object', 'properties': {
+ 'my_prop': {
+ 'type': 'string',
+ 'description': '200 property desc.'}}},
+ 'required': ['my_prop']}}},
+ '202': {
+ APIVersion(0, 1).to_mime_type(): {
+ 'schema': {'type': 'object',
+ 'properties': {'my_prop': {
+ 'type': 'string',
+ 'description': '202 property desc.'}},
+ 'required': ['my_prop']}}
+ }
+ }
+ # Check that a schema of type 'array' is received in the response.
+ self.assertEqual(expected_response_content['200'], outcome['responses']['200']['content'])
+ # Check that a schema of type 'object' is received in the response.
+ self.assertEqual(expected_response_content['202'], outcome['responses']['202']['content'])
+
+ def test_gen_method_paths(self):
+ outcome = Docs().gen_paths(False)['/api/doctest/']['get']
+
+ self.assertEqual({APIVersion(0, 1).to_mime_type(): {'type': 'object'}},
+ outcome['responses']['200']['content'])
+
+ def test_gen_paths_all(self):
+ paths = Docs().gen_paths(False)
+ for key in paths:
+ self.assertTrue(any(base in key.split('/')[1] for base in ['api', 'ui-api']))
+
+ def test_gen_tags(self):
+ outcome = Docs._gen_tags(False)
+ self.assertEqual([{'description': 'Group description', 'name': 'FooGroup'}], outcome)
diff --git a/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py b/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py
new file mode 100644
index 000000000..d1b032a51
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from .. import mgr
+from ..controllers.erasure_code_profile import ErasureCodeProfile
+from ..tests import ControllerTestCase
+
+
+class ErasureCodeProfileTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ mgr.get.side_effect = lambda key: {
+ 'osd_map': {
+ 'erasure_code_profiles': {
+ 'test': {
+ 'k': '2',
+ 'm': '1'
+ }
+ }
+ },
+ 'health': {'json': '{"status": 1}'},
+ 'fs_map': {'filesystems': []},
+
+ }[key]
+ cls.setup_controllers([ErasureCodeProfile])
+
+ def test_list(self):
+ self._get('/api/erasure_code_profile')
+ self.assertStatus(200)
+ self.assertJsonBody([{'k': 2, 'm': 1, 'name': 'test'}])
diff --git a/src/pybind/mgr/dashboard/tests/test_exceptions.py b/src/pybind/mgr/dashboard/tests/test_exceptions.py
new file mode 100644
index 000000000..2a9e840b8
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_exceptions.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import time
+
+import rados
+
+from ..controllers import Endpoint, RESTController, Router, Task
+from ..services.ceph_service import SendCommandError
+from ..services.exception import handle_rados_error, \
+ handle_send_command_error, serialize_dashboard_exception
+from ..tests import ControllerTestCase
+from ..tools import NotificationQueue, TaskManager, ViewCache
+
+
+# pylint: disable=W0613
+@Router('foo', secure=False)
+class FooResource(RESTController):
+
+ @Endpoint()
+ @handle_rados_error('foo')
+ def no_exception(self, param1, param2):
+ return [param1, param2]
+
+ @Endpoint()
+ @handle_rados_error('foo')
+ def error_foo_controller(self):
+ raise rados.OSError('hi', errno=-42)
+
+ @Endpoint()
+ @handle_send_command_error('foo')
+ def error_send_command(self):
+ raise SendCommandError('hi', 'prefix', {}, -42)
+
+ @Endpoint()
+ def error_generic(self):
+ raise rados.Error('hi')
+
+ @Endpoint()
+ def vc_no_data(self):
+ @ViewCache(timeout=0)
+ def _no_data():
+ time.sleep(0.2)
+
+ _no_data()
+ assert False
+
+ @handle_rados_error('foo')
+ @Endpoint()
+ def vc_exception(self):
+ @ViewCache(timeout=10)
+ def _raise():
+ raise rados.OSError('hi', errno=-42)
+
+ _raise()
+ assert False
+
+ @Endpoint()
+ def internal_server_error(self):
+ return 1/0
+
+ @handle_send_command_error('foo')
+ def list(self):
+ raise SendCommandError('list', 'prefix', {}, -42)
+
+ @Endpoint()
+ @Task('task_exceptions/task_exception', {1: 2}, 1.0,
+ exception_handler=serialize_dashboard_exception)
+ @handle_rados_error('foo')
+ def task_exception(self):
+ raise rados.OSError('hi', errno=-42)
+
+ @Endpoint()
+ def wait_task_exception(self):
+ ex, _ = TaskManager.list('task_exceptions/task_exception')
+ return bool(len(ex))
+
+
+# pylint: disable=C0102
+class Root(object):
+ foo = FooResource()
+
+
+class RESTControllerTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ NotificationQueue.start_queue()
+ TaskManager.init()
+ cls.setup_controllers([FooResource])
+
+ @classmethod
+ def tearDownClass(cls):
+ NotificationQueue.stop()
+
+ def test_no_exception(self):
+ self._get('/foo/no_exception/a/b')
+ self.assertStatus(200)
+ self.assertJsonBody(
+ ['a', 'b']
+ )
+
+ def test_error_foo_controller(self):
+ self._get('/foo/error_foo_controller')
+ self.assertStatus(400)
+ self.assertJsonBody(
+ {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
+ )
+
+ def test_error_send_command(self):
+ self._get('/foo/error_send_command')
+ self.assertStatus(400)
+ self.assertJsonBody(
+ {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
+ )
+
+ def test_error_send_command_list(self):
+ self._get('/foo/')
+ self.assertStatus(400)
+ self.assertJsonBody(
+ {'detail': '[errno -42] list', 'code': "42", 'component': 'foo'}
+ )
+
+ def test_error_foo_generic(self):
+ self._get('/foo/error_generic')
+ self.assertJsonBody({'detail': 'hi', 'code': 'Error', 'component': None})
+ self.assertStatus(400)
+
+ def test_viewcache_no_data(self):
+ self._get('/foo/vc_no_data')
+ self.assertStatus(200)
+ self.assertJsonBody({'status': ViewCache.VALUE_NONE, 'value': None})
+
+ def test_viewcache_exception(self):
+ self._get('/foo/vc_exception')
+ self.assertStatus(400)
+ self.assertJsonBody(
+ {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
+ )
+
+ def test_task_exception(self):
+ self._get('/foo/task_exception')
+ self.assertStatus(400)
+ self.assertJsonBody(
+ {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo',
+ 'task': {'name': 'task_exceptions/task_exception', 'metadata': {'1': 2}}}
+ )
+
+ self._get('/foo/wait_task_exception')
+ while self.json_body():
+ time.sleep(0.5)
+ self._get('/foo/wait_task_exception')
+
+ def test_internal_server_error(self):
+ self._get('/foo/internal_server_error')
+ self.assertStatus(500)
+ self.assertIn('unexpected condition', self.json_body()['detail'])
+
+ def test_404(self):
+ self._get('/foonot_found')
+ self.assertStatus(404)
+ self.assertIn('detail', self.json_body())
diff --git a/src/pybind/mgr/dashboard/tests/test_feature_toggles.py b/src/pybind/mgr/dashboard/tests/test_feature_toggles.py
new file mode 100644
index 000000000..dcc41b25e
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_feature_toggles.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+
+try:
+ from mock import Mock, patch
+except ImportError:
+ from unittest.mock import Mock, patch
+
+from ..plugins.feature_toggles import Actions, Features, FeatureToggles
+from ..tests import KVStoreMockMixin
+
+
+class SettingsTest(unittest.TestCase, KVStoreMockMixin):
+ @classmethod
+ def setUpClass(cls):
+ cls.mock_kv_store()
+ cls.CONFIG_KEY_DICT['url_prefix'] = ''
+
+ # Mock MODULE_OPTIONS
+ from .. import mgr
+ cls.mgr = mgr
+
+ # Populate real endpoint map
+ from ..controllers import BaseController
+ cls.controllers = BaseController.load_controllers()
+
+ # Initialize FeatureToggles plugin
+ cls.plugin = FeatureToggles()
+ cls.CONFIG_KEY_DICT.update(
+ {k['name']: k['default'] for k in cls.plugin.get_options()})
+ cls.plugin.setup()
+
+ def test_filter_request_when_all_features_enabled(self):
+ """
+ This test iterates over all the registered endpoints to ensure that, with default
+ feature toggles, none is disabled.
+ """
+ import cherrypy
+
+ request = Mock()
+ for controller in self.controllers:
+ request.path_info = controller.get_path()
+ try:
+ self.plugin.filter_request_before_handler(request)
+ except cherrypy.HTTPError:
+ self.fail("Request filtered {} and it shouldn't".format(
+ request.path_info))
+
+ def test_filter_request_when_some_feature_enabled(self):
+ """
+ This test focuses on a single feature and checks whether it's actually
+ disabled
+ """
+ import cherrypy
+
+ self.plugin.register_commands()['handle_command'](
+ self.mgr, Actions.DISABLE, [Features.CEPHFS])
+
+ with patch.object(self.plugin, '_get_feature_from_request',
+ return_value=Features.CEPHFS):
+ with self.assertRaises(cherrypy.HTTPError):
+ request = Mock()
+ self.plugin.filter_request_before_handler(request)
diff --git a/src/pybind/mgr/dashboard/tests/test_grafana.py b/src/pybind/mgr/dashboard/tests/test_grafana.py
new file mode 100644
index 000000000..b822020d8
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_grafana.py
@@ -0,0 +1,133 @@
+import json
+import unittest
+
+try:
+ from mock import patch
+except ImportError:
+ from unittest.mock import patch
+
+from requests import RequestException
+
+from ..controllers.grafana import Grafana
+from ..grafana import GrafanaRestClient
+from ..settings import Settings
+from ..tests import ControllerTestCase, KVStoreMockMixin
+
+
+class GrafanaTest(ControllerTestCase, KVStoreMockMixin):
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([Grafana])
+
+ def setUp(self):
+ self.mock_kv_store()
+
+ @staticmethod
+ def server_settings(
+ url='http://localhost:3000',
+ user='admin',
+ password='admin',
+ ):
+ if url is not None:
+ Settings.GRAFANA_API_URL = url
+ if user is not None:
+ Settings.GRAFANA_API_USERNAME = user
+ if password is not None:
+ Settings.GRAFANA_API_PASSWORD = password
+
+ def test_url(self):
+ self.server_settings()
+ self._get('/api/grafana/url')
+ self.assertStatus(200)
+ self.assertJsonBody({'instance': 'http://localhost:3000'})
+
+ @patch('dashboard.controllers.grafana.GrafanaRestClient.url_validation')
+ def test_validation_endpoint_returns(self, url_validation):
+ """
+ The point of this test is to see that `validation` is an active endpoint that returns a 200
+ status code.
+ """
+ url_validation.return_value = b'404'
+ self.server_settings()
+ self._get('/api/grafana/validation/foo')
+ self.assertStatus(200)
+ self.assertBody(b'"404"')
+
+ @patch('dashboard.controllers.grafana.GrafanaRestClient.url_validation')
+ def test_validation_endpoint_fails(self, url_validation):
+ url_validation.side_effect = RequestException
+ self.server_settings()
+ self._get('/api/grafana/validation/bar')
+ self.assertStatus(400)
+ self.assertJsonBody({'detail': '', 'code': 'Error', 'component': 'grafana'})
+
+ def test_dashboards_unavailable_no_url(self):
+ self.server_settings(url="")
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+ @patch('dashboard.controllers.grafana.GrafanaRestClient.push_dashboard')
+ def test_dashboards_unavailable_no_user(self, pd):
+ pd.side_effect = RequestException
+ self.server_settings(user="")
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+ def test_dashboards_unavailable_no_password(self):
+ self.server_settings(password="")
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+
+class GrafanaRestClientTest(unittest.TestCase, KVStoreMockMixin):
+ headers = {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ }
+ payload = json.dumps({
+ 'dashboard': 'foo',
+ 'overwrite': True
+ })
+
+ def setUp(self):
+ self.mock_kv_store()
+ Settings.GRAFANA_API_URL = 'https://foo/bar'
+ Settings.GRAFANA_API_USERNAME = 'xyz'
+ Settings.GRAFANA_API_PASSWORD = 'abc'
+ Settings.GRAFANA_API_SSL_VERIFY = True
+
+ def test_ssl_verify_url_validation(self):
+ with patch('requests.request') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.url_validation('FOO', Settings.GRAFANA_API_URL)
+ mock_request.assert_called_with('FOO', Settings.GRAFANA_API_URL,
+ verify=True)
+
+ def test_no_ssl_verify_url_validation(self):
+ Settings.GRAFANA_API_SSL_VERIFY = False
+ with patch('requests.request') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.url_validation('BAR', Settings.GRAFANA_API_URL)
+ mock_request.assert_called_with('BAR', Settings.GRAFANA_API_URL,
+ verify=False)
+
+ def test_ssl_verify_push_dashboard(self):
+ with patch('requests.post') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.push_dashboard('foo')
+ mock_request.assert_called_with(
+ Settings.GRAFANA_API_URL + '/api/dashboards/db',
+ auth=(Settings.GRAFANA_API_USERNAME,
+ Settings.GRAFANA_API_PASSWORD),
+ data=self.payload, headers=self.headers, verify=True)
+
+ def test_no_ssl_verify_push_dashboard(self):
+ Settings.GRAFANA_API_SSL_VERIFY = False
+ with patch('requests.post') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.push_dashboard('foo')
+ mock_request.assert_called_with(
+ Settings.GRAFANA_API_URL + '/api/dashboards/db',
+ auth=(Settings.GRAFANA_API_USERNAME,
+ Settings.GRAFANA_API_PASSWORD),
+ data=self.payload, headers=self.headers, verify=False)
diff --git a/src/pybind/mgr/dashboard/tests/test_home.py b/src/pybind/mgr/dashboard/tests/test_home.py
new file mode 100644
index 000000000..0b7886260
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_home.py
@@ -0,0 +1,74 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from .. import mgr
+from ..controllers.home import HomeController, LanguageMixin
+from ..tests import ControllerTestCase, FakeFsMixin
+
+logger = logging.getLogger()
+
+
+class HomeTest(ControllerTestCase, FakeFsMixin):
+ @classmethod
+ def setup_server(cls):
+ frontend_path = mgr.get_frontend_path()
+ cls.fs.reset()
+ cls.fs.create_dir(frontend_path)
+ cls.fs.create_file(
+ os.path.join(frontend_path, '..', 'package.json'),
+ contents='{"config":{"locale": "en"}}')
+ with mock.patch(cls.builtins_open, new=cls.f_open),\
+ mock.patch('os.listdir', new=cls.f_os.listdir):
+ lang = LanguageMixin()
+ cls.fs.create_file(
+ os.path.join(lang.DEFAULT_LANGUAGE_PATH, 'index.html'),
+ contents='<!doctype html><html lang="en"><body></body></html>')
+ cls.setup_controllers([HomeController])
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_default_lang(self):
+ self._get('/')
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_uplevel_check(self):
+ self._get('/../../../../../../etc/shadow')
+ self.assertStatus(403)
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_en(self):
+ self._get('/', headers=[('Accept-Language', 'en-US')])
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_non_supported_lang(self):
+ self._get('/', headers=[('Accept-Language', 'NO-NO')])
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_multiple_subtags_lang(self):
+ self._get('/', headers=[('Accept-Language', 'zh-Hans-CN')])
+ self.assertStatus(200)
diff --git a/src/pybind/mgr/dashboard/tests/test_host.py b/src/pybind/mgr/dashboard/tests/test_host.py
new file mode 100644
index 000000000..0f55ce522
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_host.py
@@ -0,0 +1,509 @@
+import unittest
+from unittest import mock
+
+from orchestrator import HostSpec
+
+from .. import mgr
+from ..controllers._version import APIVersion
+from ..controllers.host import Host, HostUi, get_device_osd_map, get_hosts, get_inventories
+from ..tests import ControllerTestCase, patch_orch
+from ..tools import NotificationQueue, TaskManager
+
+
+class HostControllerTest(ControllerTestCase):
+ URL_HOST = '/api/host'
+
+ @classmethod
+ def setup_server(cls):
+ NotificationQueue.start_queue()
+ TaskManager.init()
+ cls.setup_controllers([Host])
+
+ @classmethod
+ def tearDownClass(cls):
+ NotificationQueue.stop()
+
+ @mock.patch('dashboard.controllers.host.get_hosts')
+ def test_host_list_with_sources(self, mock_get_hosts):
+ hosts = [{
+ 'hostname': 'host-0',
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': False
+ }
+ }, {
+ 'hostname': 'host-1',
+ 'sources': {
+ 'ceph': False,
+ 'orchestrator': True
+ }
+ }, {
+ 'hostname': 'host-2',
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': True
+ }
+ }]
+
+ def _get_hosts(sources=None):
+ if sources == 'ceph':
+ return hosts[0]
+ if sources == 'orchestrator':
+ return hosts[1:]
+ if sources == 'ceph, orchestrator':
+ return hosts[2]
+ return hosts
+
+ mock_get_hosts.side_effect = _get_hosts
+
+ self._get(self.URL_HOST, version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertJsonBody(hosts)
+
+ self._get('{}?sources=ceph'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertJsonBody(hosts[0])
+
+ self._get('{}?sources=orchestrator'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertJsonBody(hosts[1:])
+
+ self._get('{}?sources=ceph,orchestrator'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertJsonBody(hosts)
+
+ @mock.patch('dashboard.controllers.host.get_hosts')
+ def test_host_list_with_facts(self, mock_get_hosts):
+ hosts_without_facts = [{
+ 'hostname': 'host-0',
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': False
+ }
+ }, {
+ 'hostname': 'host-1',
+ 'sources': {
+ 'ceph': False,
+ 'orchestrator': True
+ }
+ }]
+
+ hosts_facts = [{
+ 'hostname': 'host-0',
+ 'cpu_count': 1,
+ 'memory_total_kb': 1024
+ }, {
+ 'hostname': 'host-1',
+ 'cpu_count': 2,
+ 'memory_total_kb': 1024
+ }]
+
+ hosts_with_facts = [{
+ 'hostname': 'host-0',
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': False
+ },
+ 'cpu_count': 1,
+ 'memory_total_kb': 1024
+ }, {
+ 'hostname': 'host-1',
+ 'sources': {
+ 'ceph': False,
+ 'orchestrator': True
+ },
+ 'cpu_count': 2,
+ 'memory_total_kb': 1024
+ }]
+ # test with orchestrator available
+ with patch_orch(True, hosts=hosts_without_facts) as fake_client:
+ mock_get_hosts.return_value = hosts_without_facts
+ fake_client.hosts.get_facts.return_value = hosts_facts
+ # test with ?facts=true
+ self._get('{}?facts=true'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v1.1+json')
+ self.assertJsonBody(hosts_with_facts)
+
+ # test with ?facts=false
+ self._get('{}?facts=false'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v1.1+json')
+ self.assertJsonBody(hosts_without_facts)
+
+ # test with orchestrator available but orch backend!=cephadm
+ with patch_orch(True, missing_features=['get_facts']) as fake_client:
+ mock_get_hosts.return_value = hosts_without_facts
+ # test with ?facts=true
+ self._get('{}?facts=true'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(400)
+
+ # test with no orchestrator available
+ with patch_orch(False):
+ mock_get_hosts.return_value = hosts_without_facts
+
+ # test with ?facts=true
+ self._get('{}?facts=true'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(400)
+
+ # test with ?facts=false
+ self._get('{}?facts=false'.format(self.URL_HOST), version=APIVersion(1, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v1.1+json')
+ self.assertJsonBody(hosts_without_facts)
+
+ def test_get_1(self):
+ mgr.list_servers.return_value = []
+
+ with patch_orch(False):
+ self._get('{}/node1'.format(self.URL_HOST))
+ self.assertStatus(404)
+
+ def test_get_2(self):
+ mgr.list_servers.return_value = [{'hostname': 'node1'}]
+
+ with patch_orch(False):
+ self._get('{}/node1'.format(self.URL_HOST))
+ self.assertStatus(200)
+ self.assertIn('labels', self.json_body())
+ self.assertIn('status', self.json_body())
+ self.assertIn('addr', self.json_body())
+
+ def test_get_3(self):
+ mgr.list_servers.return_value = []
+
+ with patch_orch(True, hosts=[HostSpec('node1')]):
+ self._get('{}/node1'.format(self.URL_HOST))
+ self.assertStatus(200)
+ self.assertIn('labels', self.json_body())
+ self.assertIn('status', self.json_body())
+ self.assertIn('addr', self.json_body())
+
+ @mock.patch('dashboard.controllers.host.add_host')
+ def test_add_host(self, mock_add_host):
+ with patch_orch(True):
+ payload = {
+ 'hostname': 'node0',
+ 'addr': '192.0.2.0',
+ 'labels': 'mon',
+ 'status': 'maintenance'
+ }
+ self._post(self.URL_HOST, payload, version=APIVersion(0, 1))
+ self.assertStatus(201)
+ mock_add_host.assert_called()
+
+ def test_set_labels(self):
+ mgr.list_servers.return_value = []
+ orch_hosts = [
+ HostSpec('node0', labels=['aaa', 'bbb'])
+ ]
+ with patch_orch(True, hosts=orch_hosts) as fake_client:
+ fake_client.hosts.remove_label = mock.Mock()
+ fake_client.hosts.add_label = mock.Mock()
+
+ payload = {'update_labels': True, 'labels': ['bbb', 'ccc']}
+ self._put('{}/node0'.format(self.URL_HOST), payload, version=APIVersion(0, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v0.1+json')
+ fake_client.hosts.remove_label.assert_called_once_with('node0', 'aaa')
+ fake_client.hosts.add_label.assert_called_once_with('node0', 'ccc')
+
+ # return 400 if type other than List[str]
+ self._put('{}/node0'.format(self.URL_HOST),
+ {'update_labels': True, 'labels': 'ddd'},
+ version=APIVersion(0, 1))
+ self.assertStatus(400)
+
+ def test_host_maintenance(self):
+ mgr.list_servers.return_value = []
+ orch_hosts = [
+ HostSpec('node0'),
+ HostSpec('node1')
+ ]
+ with patch_orch(True, hosts=orch_hosts):
+ # enter maintenance mode
+ self._put('{}/node0'.format(self.URL_HOST), {'maintenance': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v0.1+json')
+
+ # force enter maintenance mode
+ self._put('{}/node1'.format(self.URL_HOST), {'maintenance': True, 'force': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(200)
+
+ # exit maintenance mode
+ self._put('{}/node0'.format(self.URL_HOST), {'maintenance': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(200)
+ self._put('{}/node1'.format(self.URL_HOST), {'maintenance': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(200)
+
+ # maintenance without orchestrator service
+ with patch_orch(False):
+ self._put('{}/node0'.format(self.URL_HOST), {'maintenance': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(503)
+
+ @mock.patch('dashboard.controllers.host.time')
+ def test_identify_device(self, mock_time):
+ url = '{}/host-0/identify_device'.format(self.URL_HOST)
+ with patch_orch(True) as fake_client:
+ payload = {
+ 'device': '/dev/sdz',
+ 'duration': '1'
+ }
+ self._task_post(url, payload)
+ self.assertStatus(200)
+ mock_time.sleep.assert_called()
+ calls = [
+ mock.call('host-0', '/dev/sdz', 'ident', True),
+ mock.call('host-0', '/dev/sdz', 'ident', False),
+ ]
+ fake_client.blink_device_light.assert_has_calls(calls)
+
+ @mock.patch('dashboard.controllers.host.get_inventories')
+ def test_inventory(self, mock_get_inventories):
+ inventory_url = '{}/host-0/inventory'.format(self.URL_HOST)
+ with patch_orch(True):
+ tests = [
+ {
+ 'url': inventory_url,
+ 'inventories': [{'a': 'b'}],
+ 'refresh': None,
+ 'resp': {'a': 'b'}
+ },
+ {
+ 'url': '{}?refresh=true'.format(inventory_url),
+ 'inventories': [{'a': 'b'}],
+ 'refresh': "true",
+ 'resp': {'a': 'b'}
+ },
+ {
+ 'url': inventory_url,
+ 'inventories': [],
+ 'refresh': None,
+ 'resp': {}
+ },
+ ]
+ for test in tests:
+ mock_get_inventories.reset_mock()
+ mock_get_inventories.return_value = test['inventories']
+ self._get(test['url'])
+ mock_get_inventories.assert_called_once_with(['host-0'], test['refresh'])
+ self.assertEqual(self.json_body(), test['resp'])
+ self.assertStatus(200)
+
+ # list without orchestrator service
+ with patch_orch(False):
+ self._get(inventory_url)
+ self.assertStatus(503)
+
+ def test_host_drain(self):
+ mgr.list_servers.return_value = []
+ orch_hosts = [
+ HostSpec('node0')
+ ]
+ with patch_orch(True, hosts=orch_hosts):
+ self._put('{}/node0'.format(self.URL_HOST), {'drain': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(200)
+ self.assertHeader('Content-Type',
+ 'application/vnd.ceph.api.v0.1+json')
+
+        # drain without orchestrator service
+ with patch_orch(False):
+ self._put('{}/node0'.format(self.URL_HOST), {'drain': True},
+ version=APIVersion(0, 1))
+ self.assertStatus(503)
+
+
+class HostUiControllerTest(ControllerTestCase):
+ URL_HOST = '/ui-api/host'
+
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([HostUi])
+
+ def test_labels(self):
+ orch_hosts = [
+ HostSpec('node1', labels=['foo']),
+ HostSpec('node2', labels=['foo', 'bar'])
+ ]
+
+ with patch_orch(True, hosts=orch_hosts):
+ self._get('{}/labels'.format(self.URL_HOST))
+ self.assertStatus(200)
+ labels = self.json_body()
+ labels.sort()
+ self.assertListEqual(labels, ['bar', 'foo'])
+
+ @mock.patch('dashboard.controllers.host.get_inventories')
+ def test_inventory(self, mock_get_inventories):
+ inventory_url = '{}/inventory'.format(self.URL_HOST)
+ with patch_orch(True):
+ tests = [
+ {
+ 'url': inventory_url,
+ 'refresh': None
+ },
+ {
+ 'url': '{}?refresh=true'.format(inventory_url),
+ 'refresh': "true"
+ },
+ ]
+ for test in tests:
+ mock_get_inventories.reset_mock()
+ mock_get_inventories.return_value = [{'a': 'b'}]
+ self._get(test['url'])
+ mock_get_inventories.assert_called_once_with(None, test['refresh'])
+ self.assertEqual(self.json_body(), [{'a': 'b'}])
+ self.assertStatus(200)
+
+ # list without orchestrator service
+ with patch_orch(False):
+ self._get(inventory_url)
+ self.assertStatus(503)
+
+
+class TestHosts(unittest.TestCase):
+ def test_get_hosts(self):
+ mgr.list_servers.return_value = [{
+ 'hostname': 'node1'
+ }, {
+ 'hostname': 'localhost'
+ }]
+ orch_hosts = [
+ HostSpec('node1', labels=['foo', 'bar']),
+ HostSpec('node2', labels=['bar'])
+ ]
+
+ with patch_orch(True, hosts=orch_hosts):
+ hosts = get_hosts()
+ self.assertEqual(len(hosts), 3)
+ checks = {
+ 'localhost': {
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': False
+ },
+ 'labels': []
+ },
+ 'node1': {
+ 'sources': {
+ 'ceph': True,
+ 'orchestrator': True
+ },
+ 'labels': ['bar', 'foo']
+ },
+ 'node2': {
+ 'sources': {
+ 'ceph': False,
+ 'orchestrator': True
+ },
+ 'labels': ['bar']
+ }
+ }
+ for host in hosts:
+ hostname = host['hostname']
+ self.assertDictEqual(host['sources'], checks[hostname]['sources'])
+ self.assertListEqual(host['labels'], checks[hostname]['labels'])
+
+ @mock.patch('dashboard.controllers.host.mgr.get')
+ def test_get_device_osd_map(self, mgr_get):
+ mgr_get.side_effect = lambda key: {
+ 'osd_metadata': {
+ '0': {
+ 'hostname': 'node0',
+ 'devices': 'nvme0n1,sdb',
+ },
+ '1': {
+ 'hostname': 'node0',
+ 'devices': 'nvme0n1,sdc',
+ },
+ '2': {
+ 'hostname': 'node1',
+ 'devices': 'sda',
+ },
+ '3': {
+ 'hostname': 'node2',
+ 'devices': '',
+ }
+ }
+ }[key]
+
+ device_osd_map = get_device_osd_map()
+ mgr.get.assert_called_with('osd_metadata')
+ # sort OSD IDs to make assertDictEqual work
+ for devices in device_osd_map.values():
+ for host in devices.keys():
+ devices[host] = sorted(devices[host])
+ self.assertDictEqual(device_osd_map, {
+ 'node0': {
+ 'nvme0n1': [0, 1],
+ 'sdb': [0],
+ 'sdc': [1],
+ },
+ 'node1': {
+ 'sda': [2]
+ }
+ })
+
+ @mock.patch('dashboard.controllers.host.str_to_bool')
+ @mock.patch('dashboard.controllers.host.get_device_osd_map')
+ def test_get_inventories(self, mock_get_device_osd_map, mock_str_to_bool):
+ mock_get_device_osd_map.return_value = {
+ 'host-0': {
+ 'nvme0n1': [1, 2],
+ 'sdb': [1],
+ 'sdc': [2]
+ },
+ 'host-1': {
+ 'sdb': [3]
+ }
+ }
+ inventory = [
+ {
+ 'name': 'host-0',
+ 'addr': '1.2.3.4',
+ 'devices': [
+ {'path': 'nvme0n1'},
+ {'path': '/dev/sdb'},
+ {'path': '/dev/sdc'},
+ ]
+ },
+ {
+ 'name': 'host-1',
+ 'addr': '1.2.3.5',
+ 'devices': [
+ {'path': '/dev/sda'},
+ {'path': 'sdb'},
+ ]
+ }
+ ]
+
+ with patch_orch(True, inventory=inventory) as orch_client:
+ mock_str_to_bool.return_value = True
+
+ hosts = ['host-0', 'host-1']
+ inventories = get_inventories(hosts, 'true')
+ mock_str_to_bool.assert_called_with('true')
+ orch_client.inventory.list.assert_called_once_with(hosts=hosts, refresh=True)
+ self.assertEqual(len(inventories), 2)
+ host0 = inventories[0]
+ self.assertEqual(host0['name'], 'host-0')
+ self.assertEqual(host0['addr'], '1.2.3.4')
+ self.assertEqual(host0['devices'][0]['osd_ids'], [1, 2])
+ self.assertEqual(host0['devices'][1]['osd_ids'], [1])
+ self.assertEqual(host0['devices'][2]['osd_ids'], [2])
+ host1 = inventories[1]
+ self.assertEqual(host1['name'], 'host-1')
+ self.assertEqual(host1['addr'], '1.2.3.5')
+ self.assertEqual(host1['devices'][0]['osd_ids'], [])
+ self.assertEqual(host1['devices'][1]['osd_ids'], [3])
diff --git a/src/pybind/mgr/dashboard/tests/test_iscsi.py b/src/pybind/mgr/dashboard/tests/test_iscsi.py
new file mode 100644
index 000000000..7728a496b
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_iscsi.py
@@ -0,0 +1,1008 @@
+# pylint: disable=too-many-public-methods, too-many-lines
+
+import copy
+import errno
+import json
+import unittest
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from mgr_module import ERROR_MSG_NO_INPUT_FILE
+
+from .. import mgr
+from ..controllers.iscsi import Iscsi, IscsiTarget
+from ..rest_client import RequestException
+from ..services.iscsi_client import IscsiClient
+from ..services.orchestrator import OrchClient
+from ..tests import CLICommandTestMixin, CmdException, ControllerTestCase, KVStoreMockMixin
+from ..tools import NotificationQueue, TaskManager
+
+
class IscsiTestCli(unittest.TestCase, CLICommandTestMixin):
    """CLI-level tests for the iscsi-gateway-add / iscsi-gateway-rm commands."""

    def setUp(self):
        self.mock_kv_store()
        # Reset the shared client mock so every test starts from a clean slate.
        # pylint: disable=protected-access
        IscsiClientMock._instance = IscsiClientMock()
        IscsiClient.instance = IscsiClientMock.instance

    def test_cli_add_gateway_invalid_url(self):
        """A malformed service URL is rejected with EINVAL and a usage hint."""
        with self.assertRaises(CmdException) as ctx:
            self.exec_cmd('iscsi-gateway-add', name='node1',
                          inbuf='http:/hello.com')

        exc = ctx.exception
        self.assertEqual(exc.retcode, -errno.EINVAL)
        self.assertEqual(str(exc),
                         "Invalid service URL 'http:/hello.com'. Valid format: "
                         "'<scheme>://<username>:<password>@<host>[:port]'.")

    def test_cli_add_gateway_empty_url(self):
        """An empty input file is rejected with EINVAL."""
        with self.assertRaises(CmdException) as ctx:
            self.exec_cmd('iscsi-gateway-add', name='node1',
                          inbuf='')

        exc = ctx.exception
        self.assertEqual(exc.retcode, -errno.EINVAL)
        self.assertIn(ERROR_MSG_NO_INPUT_FILE, str(exc))

    def test_cli_add_gateway(self):
        """Gateways added via the CLI end up in the persisted iSCSI config."""
        gateways = {
            'node1': 'https://admin:admin@10.17.5.1:5001',
            'node2': 'https://admin:admin@10.17.5.2:5001',
        }
        for name, service_url in gateways.items():
            self.exec_cmd('iscsi-gateway-add', name=name, inbuf=service_url)
        iscsi_config = json.loads(self.get_key("_iscsi_config"))
        self.assertEqual(
            iscsi_config['gateways'],
            {name: {'service_url': url} for name, url in gateways.items()})

    def test_cli_remove_gateway(self):
        """Removing a gateway leaves only the remaining ones in the config."""
        self.test_cli_add_gateway()
        self.exec_cmd('iscsi-gateway-rm', name='node1')
        iscsi_config = json.loads(self.get_key("_iscsi_config"))
        self.assertEqual(
            iscsi_config['gateways'],
            {'node2': {'service_url': 'https://admin:admin@10.17.5.2:5001'}})
+
+
class IscsiTestController(ControllerTestCase, KVStoreMockMixin):
    """REST-level tests for the Iscsi and IscsiTarget controllers.

    All gateway traffic is served by ``IscsiClientMock``; no real
    ceph-iscsi gateway is required. Most tests deep-copy the module-level
    ``iscsi_target_request`` / ``iscsi_target_response`` templates and
    tweak them per scenario.
    """

    @classmethod
    def setup_server(cls):
        # Task-based endpoints require the notification queue and TaskManager.
        NotificationQueue.start_queue()
        TaskManager.init()
        # Force the non-orchestrator code path.
        OrchClient.instance().available = lambda: False
        mgr.rados.side_effect = None
        cls.setup_controllers([Iscsi, IscsiTarget])

    @classmethod
    def tearDownClass(cls):
        NotificationQueue.stop()

    def setUp(self):
        self.mock_kv_store()
        # Two gateways whose service URLs IscsiClientMock.get_hostname() knows.
        self.CONFIG_KEY_DICT['_iscsi_config'] = '''
        {
            "gateways": {
                "node1": {
                    "service_url": "https://admin:admin@10.17.5.1:5001"
                },
                "node2": {
                    "service_url": "https://admin:admin@10.17.5.2:5001"
                }
            }
        }
        '''
        # Reset the shared client mock between tests.
        # pylint: disable=protected-access
        IscsiClientMock._instance = IscsiClientMock()
        IscsiClient.instance = IscsiClientMock.instance

    def test_enable_discoveryauth(self):
        """Discovery auth credentials can be set and read back."""
        discoveryauth = {
            'user': 'myiscsiusername',
            'password': 'myiscsipassword',
            'mutual_user': 'myiscsiusername2',
            'mutual_password': 'myiscsipassword2'
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)

    def test_bad_discoveryauth(self):
        """An over-long password is rejected and leaves auth unset."""
        discoveryauth = {
            'user': 'myiscsiusername',
            'password': 'myiscsipasswordmyiscsipasswordmyiscsipassword',
            'mutual_user': '',
            'mutual_password': ''
        }
        put_response = {
            'detail': 'Bad authentication',
            'code': 'target_bad_auth',
            'component': 'iscsi'
        }
        get_response = {
            'user': '',
            'password': '',
            'mutual_user': '',
            'mutual_password': ''
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(400)
        self.assertJsonBody(put_response)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(get_response)

    def test_disable_discoveryauth(self):
        """Empty credentials disable discovery auth."""
        discoveryauth = {
            'user': '',
            'password': '',
            'mutual_user': '',
            'mutual_password': ''
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)

    def test_list_empty(self):
        """With no targets configured, the list endpoint returns []."""
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        self.assertJsonBody([])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_list(self, _validate_image_mock):
        """A created target shows up in the target list."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw1"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        self.assertJsonBody([response])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_create(self, _validate_image_mock):
        """A created target can be fetched individually."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw2"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._get('/api/iscsi/target/{}'.format(request['target_iqn']))
        self.assertStatus(200)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        self.assertJsonBody(response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_delete(self, _validate_image_mock):
        """Deleting a target removes it from the list."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw3"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._task_delete('/api/iscsi/target/{}'.format(request['target_iqn']))
        self.assertStatus(204)
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        self.assertJsonBody([])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client(self, _validate_image_mock):
        """A client added on update appears in the target with empty info."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw4"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": "myiscsipassword5",
                    "user": "myiscsiusername5",
                    "mutual_password": "myiscsipassword6",
                    "mutual_user": "myiscsiusername6"}
            })
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": "myiscsipassword5",
                    "user": "myiscsiusername5",
                    "mutual_password": "myiscsipassword6",
                    "mutual_user": "myiscsiusername6"},
                "info": {
                    "alias": "",
                    "ip_address": [],
                    "state": {}
                }
            })
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_bad_client(self, _validate_image_mock):
        """Adding a client with an over-long password fails; target unchanged."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw4"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client4",
                "auth": {
                    "password": "myiscsipassword7myiscsipassword7myiscsipasswo",
                    "user": "myiscsiusername7",
                    "mutual_password": "myiscsipassword8",
                    "mutual_user": "myiscsiusername8"}
            })
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn

        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(201)
        self._task_put('/api/iscsi/target/{}'.format(create_request['target_iqn']), update_request)
        self.assertStatus(400)
        self._get('/api/iscsi/target/{}'.format(update_request['new_target_iqn']))
        self.assertStatus(200)
        self.assertJsonBody(response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_client_password(self, _validate_image_mock):
        """A client's CHAP password can be changed in place."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw5"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['auth']['password'] = 'MyNewPassword'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['auth']['password'] = 'MyNewPassword'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_client(self, _validate_image_mock):
        """A client IQN can be renamed via update."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw6"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['client_iqn'] = 'iqn.1994-05.com.redhat:rh7-client0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['client_iqn'] = 'iqn.1994-05.com.redhat:rh7-client0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_disk(self, _validate_image_mock):
        """A new disk gets the next wwn/lun assigned by the mock."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw7"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'].append(
            {
                "image": "lun3",
                "pool": "rbd",
                "controls": {},
                "backstore": "user:rbd"
            })
        update_request['clients'][0]['luns'].append({"image": "lun3", "pool": "rbd"})
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['disks'].append(
            {
                "image": "lun3",
                "pool": "rbd",
                "controls": {},
                "backstore": "user:rbd",
                "wwn": "64af6678-9694-4367-bacc-f8eb0baa2",
                "lun": 2

            })
        response['clients'][0]['luns'].append({"image": "lun3", "pool": "rbd"})
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_disk_image(self, _validate_image_mock):
        """A disk's backing image can be replaced."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw8"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'][0]['image'] = 'lun0'
        update_request['clients'][0]['luns'][0]['image'] = 'lun0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['disks'][0]['image'] = 'lun0'
        response['clients'][0]['luns'][0]['image'] = 'lun0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_disk_controls(self, _validate_image_mock):
        """Disk controls can be reconfigured."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw9"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'][0]['controls'] = {"qfull_timeout": 15}
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['disks'][0]['controls'] = {"qfull_timeout": 15}
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_target(self, _validate_image_mock):
        """The target IQN itself can be renamed via new_target_iqn."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw10"
        new_target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw11"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = new_target_iqn
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = new_target_iqn
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_group(self, _validate_image_mock):
        """A group can be renamed via update."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw12"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['group_id'] = 'mygroup0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['group_id'] = 'mygroup0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client_to_group(self, _validate_image_mock):
        """A new (auth-less) client can be added straight into a group."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw13"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": None,
                    "user": None,
                    "mutual_password": None,
                    "mutual_user": None}
            })
        update_request['groups'][0]['members'].append('iqn.1994-05.com.redhat:rh7-client3')
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].append(
            {
                "luns": [],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": None,
                    "user": None,
                    "mutual_password": None,
                    "mutual_user": None},
                "info": {
                    "alias": "",
                    "ip_address": [],
                    "state": {}
                }
            })
        response['groups'][0]['members'].append('iqn.1994-05.com.redhat:rh7-client3')
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_from_group(self, _validate_image_mock):
        """A member can be removed from a group."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw14"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['members'].remove('iqn.1994-05.com.redhat:rh7-client2')
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['members'].remove('iqn.1994-05.com.redhat:rh7-client2')
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_groups(self, _validate_image_mock):
        """All groups can be removed at once."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw15"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'] = []
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'] = []
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client_to_multiple_groups(self, _validate_image_mock):
        """An initiator may belong to at most one group."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw16"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        create_request['groups'].append(copy.deepcopy(create_request['groups'][0]))
        create_request['groups'][1]['group_id'] = 'mygroup2'
        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(400)
        self.assertJsonBody({
            'detail': 'Each initiator can only be part of 1 group at a time',
            'code': 'initiator_in_multiple_groups',
            'component': 'iscsi'
        })

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_lun(self, _validate_image_mock):
        """A LUN can be removed from a client's mapping."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw17"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        create_request['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun2", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_client_auth(self, _validate_image_mock):
        """Client auth can be updated without other changes."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw18"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['auth']['password'] = 'myiscsipasswordX'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['auth']['password'] = 'myiscsipasswordX'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_logged_in(self, _validate_image_mock):
        """A logged-in client must not be removable."""
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw19"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].pop(0)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        for client in response['clients']:
            client['info'] = client_info
        update_response = {
            'detail': "Client 'iqn.1994-05.com.redhat:rh7-client' cannot be deleted until it's "
                      "logged out",
            'code': 'client_logged_in',
            'component': 'iscsi'
        }
        self._update_iscsi_target(create_request, update_request, 400, update_response, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client(self, _validate_image_mock):
        """A logged-out client can be removed."""
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw20"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].pop(0)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].pop(0)
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_image_to_group_with_client_logged_in(self, _validate_image_mock):
        """Adding an image to a group works even with a logged-in client."""
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        new_disk = {"pool": "rbd", "image": "lun1"}
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw21"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['disks'].append(new_disk)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['disks'].insert(0, new_disk)
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_image_to_initiator_with_client_logged_in(self, _validate_image_mock):
        """Adding an image to an initiator works even while logged in."""
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        new_disk = {"pool": "rbd", "image": "lun2"}
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw22"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['luns'].append(new_disk)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['luns'].append(new_disk)
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_image_from_group_with_client_logged_in(self, _validate_image_mock):
        """Removing all images from a group works even while logged in."""
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw23"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['disks'] = []
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['disks'] = []
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    def _update_iscsi_target(self, create_request, update_request, update_response_code,
                             update_response, response):
        """Create a target, apply an update, and verify the resulting state.

        :param create_request: body POSTed to create the target
        :param update_request: body PUT to update it (must carry 'new_target_iqn')
        :param update_response_code: HTTP status expected from the PUT
        :param update_response: expected PUT body (None for task responses)
        :param response: expected GET body after the update
        """
        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(201)
        self._task_put(
            '/api/iscsi/target/{}'.format(create_request['target_iqn']), update_request)
        self.assertStatus(update_response_code)
        self.assertJsonBody(update_response)
        self._get(
            '/api/iscsi/target/{}'.format(update_request['new_target_iqn']))
        self.assertStatus(200)
        self.assertJsonBody(response)
+
+
# Template request body for target creation. Tests deep-copy this dict and
# override 'target_iqn' (plus whatever field the scenario exercises), so the
# template itself must never be mutated.
iscsi_target_request = {
    "target_iqn": "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw",
    "portals": [
        {"ip": "192.168.100.202", "host": "node2"},
        {"ip": "10.0.2.15", "host": "node2"},
        {"ip": "192.168.100.203", "host": "node3"}
    ],
    "disks": [
        {"image": "lun1", "pool": "rbd", "backstore": "user:rbd",
         "controls": {"max_data_area_mb": 128}},
        {"image": "lun2", "pool": "rbd", "backstore": "user:rbd",
         "controls": {"max_data_area_mb": 128}}
    ],
    "clients": [
        {
            "luns": [{"image": "lun1", "pool": "rbd"}],
            "client_iqn": "iqn.1994-05.com.redhat:rh7-client",
            "auth": {
                "password": "myiscsipassword1",
                "user": "myiscsiusername1",
                "mutual_password": "myiscsipassword2",
                "mutual_user": "myiscsiusername2"}
        },
        {
            "luns": [],
            "client_iqn": "iqn.1994-05.com.redhat:rh7-client2",
            "auth": {
                "password": "myiscsipassword3",
                "user": "myiscsiusername3",
                "mutual_password": "myiscsipassword4",
                "mutual_user": "myiscsiusername4"
            }
        }
    ],
    "acl_enabled": True,
    "auth": {
        "password": "",
        "user": "",
        "mutual_password": "",
        "mutual_user": ""},
    "target_controls": {},
    "groups": [
        {
            "group_id": "mygroup",
            "disks": [{"pool": "rbd", "image": "lun2"}],
            "members": ["iqn.1994-05.com.redhat:rh7-client2"]
        }
    ]
}
+
# Expected GET response for a target created from iscsi_target_request:
# portals are sorted, disks carry the wwn/lun values IscsiClientMock assigns,
# and each client gains an 'info' sub-document. Tests deep-copy and adjust it.
iscsi_target_response = {
    'target_iqn': 'iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw',
    'portals': [
        {'host': 'node2', 'ip': '10.0.2.15'},
        {'host': 'node2', 'ip': '192.168.100.202'},
        {'host': 'node3', 'ip': '192.168.100.203'}
    ],
    'disks': [
        {'pool': 'rbd', 'image': 'lun1', 'backstore': 'user:rbd',
         'wwn': '64af6678-9694-4367-bacc-f8eb0baa0', 'lun': 0,
         'controls': {'max_data_area_mb': 128}},
        {'pool': 'rbd', 'image': 'lun2', 'backstore': 'user:rbd',
         'wwn': '64af6678-9694-4367-bacc-f8eb0baa1', 'lun': 1,
         'controls': {'max_data_area_mb': 128}}
    ],
    'clients': [
        {
            'client_iqn': 'iqn.1994-05.com.redhat:rh7-client',
            'luns': [{'pool': 'rbd', 'image': 'lun1'}],
            'auth': {
                'user': 'myiscsiusername1',
                'password': 'myiscsipassword1',
                'mutual_password': 'myiscsipassword2',
                'mutual_user': 'myiscsiusername2'
            },
            'info': {
                'alias': '',
                'ip_address': [],
                'state': {}
            }
        },
        {
            'client_iqn': 'iqn.1994-05.com.redhat:rh7-client2',
            'luns': [],
            'auth': {
                'user': 'myiscsiusername3',
                'password': 'myiscsipassword3',
                'mutual_password': 'myiscsipassword4',
                'mutual_user': 'myiscsiusername4'
            },
            'info': {
                'alias': '',
                'ip_address': [],
                'state': {}
            }
        }
    ],
    "acl_enabled": True,
    "auth": {
        "password": "",
        "user": "",
        "mutual_password": "",
        "mutual_user": ""},
    'groups': [
        {
            'group_id': 'mygroup',
            'disks': [{'pool': 'rbd', 'image': 'lun2'}],
            'members': ['iqn.1994-05.com.redhat:rh7-client2']
        }
    ],
    'target_controls': {},
    'info': {
        'num_sessions': 0
    }
}
+
+
class IscsiClientMock(object):
    """In-memory stand-in for the dashboard's IscsiClient.

    Keeps the whole gateway configuration in a plain dict and implements
    the subset of the ceph-iscsi REST client API that the controllers use.
    The test fixtures reset the ``_instance`` singleton before each test.
    """

    # Shared singleton, re-created in the tests' setUp().
    _instance = None

    def __init__(self):
        self.gateway_name = None
        self.service_url = None
        # Mirrors the gateway's config document.
        self.config = {
            "created": "2019/01/17 08:57:16",
            "discovery_auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "disks": {},
            "epoch": 0,
            "gateways": {},
            "targets": {},
            "updated": "",
            "version": 11
        }
        # Returned verbatim by get_clientinfo(); tests override this to
        # simulate logged-in initiators.
        self.clientinfo = {
            'alias': '',
            'ip_address': [],
            'state': {}
        }

    @classmethod
    def instance(cls, gateway_name=None, service_url=None):
        """Return the singleton, recording which gateway was requested."""
        cls._instance.gateway_name = gateway_name
        cls._instance.service_url = service_url
        # pylint: disable=unused-argument
        return cls._instance

    def ping(self):
        return {
            "message": "pong"
        }

    def get_settings(self):
        """Static gateway capability/defaults document."""
        return {
            "api_version": 2,
            "backstores": [
                "user:rbd"
            ],
            "config": {
                "minimum_gateways": 2
            },
            "default_backstore": "user:rbd",
            "required_rbd_features": {
                "rbd": 0,
                "user:rbd": 4,
            },
            "unsupported_rbd_features": {
                "rbd": 88,
                "user:rbd": 0,
            },
            "disk_default_controls": {
                "user:rbd": {
                    "hw_max_sectors": 1024,
                    "max_data_area_mb": 8,
                    "osd_op_timeout": 30,
                    "qfull_timeout": 5
                }
            },
            "target_default_controls": {
                "cmdsn_depth": 128,
                "dataout_timeout": 20,
                "first_burst_length": 262144,
                "immediate_data": "Yes",
                "initial_r2t": "Yes",
                "max_burst_length": 524288,
                "max_outstanding_r2t": 1,
                "max_recv_data_segment_length": 262144,
                "max_xmit_data_segment_length": 262144,
                "nopin_response_timeout": 5,
                "nopin_timeout": 5
            }
        }

    def get_config(self):
        # Deep copy so callers cannot mutate the mock's state in place.
        return copy.deepcopy(self.config)

    def create_target(self, target_iqn, target_controls):
        self.config['targets'][target_iqn] = {
            "clients": {},
            "acl_enabled": True,
            "auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "controls": target_controls,
            "created": "2019/01/17 09:22:34",
            "disks": {},
            "groups": {},
            "portals": {}
        }

    def create_gateway(self, target_iqn, gateway_name, ip_addresses):
        target_config = self.config['targets'][target_iqn]
        if 'ip_list' not in target_config:
            target_config['ip_list'] = []
        target_config['ip_list'] += ip_addresses
        target_config['portals'][gateway_name] = {
            "portal_ip_addresses": ip_addresses
        }

    def delete_gateway(self, target_iqn, gateway_name):
        target_config = self.config['targets'][target_iqn]
        portal_config = target_config['portals'][gateway_name]
        for ip in portal_config['portal_ip_addresses']:
            target_config['ip_list'].remove(ip)
        target_config['portals'].pop(gateway_name)

    def create_disk(self, pool, image, backstore, wwn):
        # Generated wwn is deterministic (suffix = current disk count), which
        # the expected-response fixtures rely on.
        if wwn is None:
            wwn = '64af6678-9694-4367-bacc-f8eb0baa' + str(len(self.config['disks']))
        image_id = '{}/{}'.format(pool, image)
        self.config['disks'][image_id] = {
            "pool": pool,
            "image": image,
            "backstore": backstore,
            "controls": {},
            "wwn": wwn
        }

    def create_target_lun(self, target_iqn, image_id, lun):
        target_config = self.config['targets'][target_iqn]
        # Default lun id is the next free slot on this target.
        if lun is None:
            lun = len(target_config['disks'])
        target_config['disks'][image_id] = {
            "lun_id": lun
        }
        # First portal becomes the owning gateway.
        self.config['disks'][image_id]['owner'] = list(target_config['portals'].keys())[0]

    def reconfigure_disk(self, pool, image, controls):
        """Store only the controls that differ from the backstore defaults."""
        image_id = '{}/{}'.format(pool, image)
        settings = self.get_settings()
        backstore = self.config['disks'][image_id]['backstore']
        disk_default_controls = settings['disk_default_controls'][backstore]
        new_controls = {}
        for control_k, control_v in controls.items():
            if control_v != disk_default_controls[control_k]:
                new_controls[control_k] = control_v
        self.config['disks'][image_id]['controls'] = new_controls

    def create_client(self, target_iqn, client_iqn):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn] = {
            "auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "group_name": "",
            "luns": {}
        }

    def create_client_lun(self, target_iqn, client_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn]['luns'][image_id] = {}

    def delete_client_lun(self, target_iqn, client_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        del target_config['clients'][client_iqn]['luns'][image_id]

    def create_client_auth(self, target_iqn, client_iqn, user, password, m_user, m_password):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn]['auth']['username'] = user
        target_config['clients'][client_iqn]['auth']['password'] = password
        target_config['clients'][client_iqn]['auth']['mutual_username'] = m_user
        target_config['clients'][client_iqn]['auth']['mutual_password'] = m_password

    def create_group(self, target_iqn, group_name, members, image_ids):
        target_config = self.config['targets'][target_iqn]
        target_config['groups'][group_name] = {
            "disks": {},
            "members": []
        }
        for image_id in image_ids:
            target_config['groups'][group_name]['disks'][image_id] = {}
        target_config['groups'][group_name]['members'] = members

    def update_group(self, target_iqn, group_name, members, image_ids):
        # NOTE(review): 'members' and 'image_ids' here appear to be the
        # entries to REMOVE — the group keeps only the old disks/members
        # not present in the arguments. Confirm against the controller's
        # call sites before changing.
        target_config = self.config['targets'][target_iqn]
        group = target_config['groups'][group_name]
        old_members = group['members']
        disks = group['disks']
        target_config['groups'][group_name] = {
            "disks": {},
            "members": []
        }

        for image_id in disks.keys():
            if image_id not in image_ids:
                target_config['groups'][group_name]['disks'][image_id] = {}

        new_members = []
        for member_iqn in old_members:
            if member_iqn not in members:
                new_members.append(member_iqn)
        target_config['groups'][group_name]['members'] = new_members

    def delete_group(self, target_iqn, group_name):
        target_config = self.config['targets'][target_iqn]
        del target_config['groups'][group_name]

    def delete_client(self, target_iqn, client_iqn):
        target_config = self.config['targets'][target_iqn]
        del target_config['clients'][client_iqn]

    def delete_target_lun(self, target_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        target_config['disks'].pop(image_id)
        del self.config['disks'][image_id]['owner']

    def delete_disk(self, pool, image):
        image_id = '{}/{}'.format(pool, image)
        del self.config['disks'][image_id]

    def delete_target(self, target_iqn):
        del self.config['targets'][target_iqn]

    def get_ip_addresses(self):
        """IPs of the gateway this instance was last bound to."""
        ips = {
            'node1': ['192.168.100.201'],
            'node2': ['192.168.100.202', '10.0.2.15'],
            'node3': ['192.168.100.203']
        }
        return {'data': ips[self.gateway_name]}

    def get_hostname(self):
        """Resolve the bound service URL to a hostname; raise if unknown."""
        hostnames = {
            'https://admin:admin@10.17.5.1:5001': 'node1',
            'https://admin:admin@10.17.5.2:5001': 'node2',
            'https://admin:admin@10.17.5.3:5001': 'node3'
        }
        if self.service_url not in hostnames:
            raise RequestException('No route to host')
        return {'data': hostnames[self.service_url]}

    def update_discoveryauth(self, user, password, mutual_user, mutual_password):
        self.config['discovery_auth']['username'] = user
        self.config['discovery_auth']['password'] = password
        self.config['discovery_auth']['mutual_username'] = mutual_user
        self.config['discovery_auth']['mutual_password'] = mutual_password

    def update_targetacl(self, target_iqn, action):
        self.config['targets'][target_iqn]['acl_enabled'] = (action == 'enable_acl')

    def update_targetauth(self, target_iqn, user, password, mutual_user, mutual_password):
        target_config = self.config['targets'][target_iqn]
        target_config['auth']['username'] = user
        target_config['auth']['password'] = password
        target_config['auth']['mutual_username'] = mutual_user
        target_config['auth']['mutual_password'] = mutual_password

    def get_targetinfo(self, target_iqn):
        # pylint: disable=unused-argument
        return {
            'num_sessions': 0
        }

    def get_clientinfo(self, target_iqn, client_iqn):
        # pylint: disable=unused-argument
        return self.clientinfo
diff --git a/src/pybind/mgr/dashboard/tests/test_nfs.py b/src/pybind/mgr/dashboard/tests/test_nfs.py
new file mode 100644
index 000000000..5e71b6525
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_nfs.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=too-many-lines
+from copy import deepcopy
+from unittest.mock import Mock, patch
+from urllib.parse import urlencode
+
+from .. import mgr
+from ..controllers._version import APIVersion
+from ..controllers.nfs import NFSGaneshaExports, NFSGaneshaUi
+from ..tests import ControllerTestCase
+from ..tools import NotificationQueue, TaskManager
+
+
class NFSGaneshaExportsTest(ControllerTestCase):
    """Tests for the /api/nfs-ganesha/export REST endpoints.

    The `nfs` mgr module is faked through ``mgr.remote``. The controller is
    expected to strip the RGW FSAL credentials (``access_key_id`` and
    ``secret_access_key``) from every export it hands out.
    """

    # Export exactly as the nfs mgr module returns it (credentials included).
    _nfs_module_export = {
        "export_id": 1,
        "path": "bk1",
        "cluster_id": "myc",
        "pseudo": "/bk-ps",
        "access_type": "RO",
        "squash": "root_id_squash",
        "security_label": False,
        "protocols": [
            4
        ],
        "transports": [
            "TCP",
            "UDP"
        ],
        "fsal": {
            "name": "RGW",
            "user_id": "dashboard",
            "access_key_id": "UUU5YVVOQ2P5QTOPYNAN",
            "secret_access_key": "7z87tMUUsHr67ZWx12pCbWkp9UyOldxhDuPY8tVN"
        },
        "clients": []
    }

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # What the REST API must return: the same export minus the
        # RGW credentials, which must never be exposed to clients.
        cls._expected_export = deepcopy(cls._nfs_module_export)
        del cls._expected_export['fsal']['access_key_id']
        del cls._expected_export['fsal']['secret_access_key']

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        NotificationQueue.stop()

    @classmethod
    def setup_server(cls):
        NotificationQueue.start_queue()
        TaskManager.init()
        cls.setup_controllers([NFSGaneshaExports])

    def test_list_exports(self):
        mgr.remote = Mock(return_value=[self._nfs_module_export])

        self._get('/api/nfs-ganesha/export')
        self.assertStatus(200)
        self.assertJsonBody([self._expected_export])

    def test_get_export(self):
        mgr.remote = Mock(return_value=self._nfs_module_export)

        self._get('/api/nfs-ganesha/export/myc/1')
        self.assertStatus(200)
        self.assertJsonBody(self._expected_export)

    def test_create_export(self):
        export_mgr = Mock()
        created_nfs_export = deepcopy(self._nfs_module_export)
        created_nfs_export['pseudo'] = 'new-pseudo'
        created_nfs_export['export_id'] = 2
        # First pseudo lookup (the pre-create duplicate check) finds nothing;
        # the second one returns the export that was just created.
        export_mgr.get_export_by_pseudo.side_effect = [None, created_nfs_export]
        export_mgr.apply_export.return_value = (0, '', '')
        mgr.remote.return_value = export_mgr

        export_create_body = deepcopy(self._expected_export)
        del export_create_body['export_id']
        export_create_body['pseudo'] = created_nfs_export['pseudo']

        self._post('/api/nfs-ganesha/export',
                   export_create_body,
                   version=APIVersion(2, 0))
        self.assertStatus(201)
        # The response must echo the request body plus the assigned export_id.
        # Build a real copy here: the previous version aliased the request
        # dict and then asserted against the alias, which made the expected
        # body a no-op.
        expected_body = deepcopy(export_create_body)
        expected_body['export_id'] = created_nfs_export['export_id']
        self.assertJsonBody(expected_body)

    def test_create_export_with_existing_pseudo_fails(self):
        export_mgr = Mock()
        # The duplicate check finds an existing export with the same pseudo.
        export_mgr.get_export_by_pseudo.return_value = self._nfs_module_export
        mgr.remote.return_value = export_mgr

        export_create_body = deepcopy(self._expected_export)
        del export_create_body['export_id']

        self._post('/api/nfs-ganesha/export',
                   export_create_body,
                   version=APIVersion(2, 0))
        self.assertStatus(400)
        response = self.json_body()
        self.assertIn(f'Pseudo {export_create_body["pseudo"]} is already in use',
                      response['detail'])

    def test_set_export(self):
        export_mgr = Mock()
        updated_nfs_export = deepcopy(self._nfs_module_export)
        updated_nfs_export['pseudo'] = 'updated-pseudo'
        export_mgr.get_export_by_pseudo.return_value = updated_nfs_export
        export_mgr.apply_export.return_value = (0, '', '')
        mgr.remote.return_value = export_mgr

        updated_export_body = deepcopy(self._expected_export)
        updated_export_body['pseudo'] = updated_nfs_export['pseudo']

        self._put('/api/nfs-ganesha/export/myc/2',
                  updated_export_body,
                  version=APIVersion(2, 0))
        self.assertStatus(200)
        self.assertJsonBody(updated_export_body)

    def test_delete_export(self):
        # First remote call returns the export (it exists); the second
        # performs the deletion.
        mgr.remote = Mock(side_effect=[self._nfs_module_export, None])

        self._delete('/api/nfs-ganesha/export/myc/2',
                     version=APIVersion(2, 0))
        self.assertStatus(204)

    def test_delete_export_not_found(self):
        mgr.remote = Mock(return_value=None)

        self._delete('/api/nfs-ganesha/export/myc/3',
                     version=APIVersion(2, 0))
        self.assertStatus(404)
+
+
class NFSGaneshaUiControllerTest(ControllerTestCase):
    """Tests for the /ui-api/nfs-ganesha helper endpoints (lsdir, status)."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([NFSGaneshaUi])

    @classmethod
    def _create_ls_dir_url(cls, fs_name, query_params):
        # Build '/ui-api/nfs-ganesha/lsdir/<fs>' with an optional query string.
        api_url = '/ui-api/nfs-ganesha/lsdir/{}'.format(fs_name)
        if query_params is not None:
            return '{}?{}'.format(api_url, urlencode(query_params))
        return api_url

    @patch('dashboard.controllers.nfs.CephFS')
    def test_lsdir(self, cephfs_class):
        """Table-driven check of root_dir/depth handling in the lsdir endpoint."""
        cephfs_class.return_value.ls_dir.return_value = [
            {'path': '/foo'},
            {'path': '/foo/bar'}
        ]
        mocked_ls_dir = cephfs_class.return_value.ls_dir

        # Each entry: request query params, the args CephFS.ls_dir must
        # receive, the first returned path, and the expected HTTP status.
        reqs = [
            {
                'params': None,
                'cephfs_ls_dir_args': ['/', 1],
                'path0': '/',
                'status': 200
            },
            {
                'params': {'root_dir': '/', 'depth': '1'},
                'cephfs_ls_dir_args': ['/', 1],
                'path0': '/',
                'status': 200
            },
            {
                # An empty root_dir falls back to '/'.
                'params': {'root_dir': '', 'depth': '1'},
                'cephfs_ls_dir_args': ['/', 1],
                'path0': '/',
                'status': 200
            },
            {
                'params': {'root_dir': '/foo', 'depth': '3'},
                'cephfs_ls_dir_args': ['/foo', 3],
                'path0': '/foo',
                'status': 200
            },
            {
                # A relative root_dir gets a leading '/'; depth 6 is expected
                # to be reduced to 5 — presumably the controller caps depth at
                # 5 (TODO: confirm against the controller implementation).
                'params': {'root_dir': 'foo', 'depth': '6'},
                'cephfs_ls_dir_args': ['/foo', 5],
                'path0': '/foo',
                'status': 200
            },
            {
                # Negative and non-numeric depths are rejected.
                'params': {'root_dir': '/', 'depth': '-1'},
                'status': 400
            },
            {
                'params': {'root_dir': '/', 'depth': 'abc'},
                'status': 400
            }
        ]

        for req in reqs:
            self._get(self._create_ls_dir_url('a', req['params']))
            self.assertStatus(req['status'])

            # Returned paths should contain root_dir as first element
            if req['status'] == 200:
                paths = self.json_body()['paths']
                self.assertEqual(paths[0], req['path0'])
                cephfs_class.assert_called_once_with('a')

            # Check the arguments passed to `CephFS.ls_dir`.
            if req.get('cephfs_ls_dir_args'):
                mocked_ls_dir.assert_called_once_with(*req['cephfs_ls_dir_args'])
            else:
                mocked_ls_dir.assert_not_called()
            # Reset call records so the next table entry starts clean.
            mocked_ls_dir.reset_mock()
            cephfs_class.reset_mock()

    @patch('dashboard.controllers.nfs.cephfs')
    @patch('dashboard.controllers.nfs.CephFS')
    def test_lsdir_non_existed_dir(self, cephfs_class, cephfs):
        """A missing directory yields an empty path list, not an error."""
        cephfs.ObjectNotFound = Exception
        cephfs.PermissionError = Exception
        cephfs_class.return_value.ls_dir.side_effect = cephfs.ObjectNotFound()
        self._get(self._create_ls_dir_url('a', {'root_dir': '/foo', 'depth': '3'}))
        cephfs_class.assert_called_once_with('a')
        cephfs_class.return_value.ls_dir.assert_called_once_with('/foo', 3)
        self.assertStatus(200)
        self.assertJsonBody({'paths': []})

    def test_status_available(self):
        self._get('/ui-api/nfs-ganesha/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': True, 'message': None})

    def test_status_not_available(self):
        # A failing remote call is reported as "not available" with the
        # exception message, rather than propagated as an HTTP error.
        mgr.remote = Mock(side_effect=RuntimeError('Test'))
        self._get('/ui-api/nfs-ganesha/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': False, 'message': 'Test'})
diff --git a/src/pybind/mgr/dashboard/tests/test_notification.py b/src/pybind/mgr/dashboard/tests/test_notification.py
new file mode 100644
index 000000000..df303e455
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_notification.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import random
+import time
+import unittest
+
+from ..tools import NotificationQueue
+
+
class Listener(object):
    """Records notifications delivered by the NotificationQueue.

    Each handler appends both the received value and a timestamp, so tests
    can verify delivery content as well as handler ordering (priorities).
    """
    # pylint: disable=too-many-instance-attributes

    def __init__(self):
        self._reset_buffers()

    def _reset_buffers(self):
        """(Re)initialize every value/timestamp buffer to an empty list."""
        self.type1 = []
        self.type1_ts = []
        self.type2 = []
        self.type2_ts = []
        self.type1_3 = []
        self.type1_3_ts = []
        self.all = []
        self.all_ts = []

    def register(self):
        NotificationQueue.register(self.log_type1, 'type1', priority=90)
        NotificationQueue.register(self.log_type2, 'type2')
        NotificationQueue.register(self.log_type1_3, ['type1', 'type3'])
        NotificationQueue.register(self.log_all, priority=50)

        # Duplicate registrations: the queue is expected to ignore these.
        NotificationQueue.register(self.log_type1, 'type1')
        NotificationQueue.register(self.log_type1_3, ['type1', 'type3'])
        NotificationQueue.register(self.log_all)

    def log_type1(self, val):
        self.type1_ts.append(time.time())
        self.type1.append(val)

    def log_type2(self, val):
        self.type2_ts.append(time.time())
        self.type2.append(val)

    def log_type1_3(self, val):
        self.type1_3_ts.append(time.time())
        self.type1_3.append(val)

    def log_all(self, val):
        self.all_ts.append(time.time())
        self.all.append(val)

    def clear(self):
        """Reset buffers and undo every registration made by register()."""
        self._reset_buffers()
        NotificationQueue.deregister(self.log_type1, 'type1')
        NotificationQueue.deregister(self.log_type2, 'type2')
        NotificationQueue.deregister(self.log_type1_3, ['type1', 'type3'])
        NotificationQueue.deregister(self.log_all)
+
+
class NotificationQueueTest(unittest.TestCase):
    """Exercises NotificationQueue delivery, priorities and deregistration."""

    @classmethod
    def setUpClass(cls):
        # A single listener is shared by all tests; register()/clear() in
        # setUp/tearDown keep its state isolated per test.
        cls.listener = Listener()

    def setUp(self):
        self.listener.register()

    def tearDown(self):
        self.listener.clear()

    def test_invalid_register(self):
        # n_types must be a string or a list of strings; anything else raises.
        with self.assertRaises(Exception) as ctx:
            NotificationQueue.register(None, 1)
        self.assertEqual(str(ctx.exception),
                         "n_types param is neither a string nor a list")

    def test_notifications(self):
        """Basic delivery: each handler sees only its subscribed types."""
        NotificationQueue.start_queue()
        NotificationQueue.new_notification('type1', 1)
        NotificationQueue.new_notification('type2', 2)
        NotificationQueue.new_notification('type3', 3)
        # stop() is expected to drain the queue before returning, so all
        # assertions below run after every notification was dispatched.
        NotificationQueue.stop()
        self.assertEqual(self.listener.type1, [1])
        self.assertEqual(self.listener.type2, [2])
        self.assertEqual(self.listener.type1_3, [1, 3])
        self.assertEqual(self.listener.all, [1, 2, 3])

        # validate priorities
        # Handlers for the same notification fire in priority order; the
        # timestamps encode that order (type1_3 before all(50) before
        # type1(90) — exact default priority defined by NotificationQueue,
        # TODO confirm).
        self.assertLessEqual(self.listener.type1_3_ts[0], self.listener.all_ts[0])
        self.assertLessEqual(self.listener.all_ts[0], self.listener.type1_ts[0])
        self.assertLessEqual(self.listener.type2_ts[0], self.listener.all_ts[1])
        self.assertLessEqual(self.listener.type1_3_ts[1], self.listener.all_ts[2])

    def test_notifications2(self):
        """Stress delivery: 600 notifications, randomly paced."""
        NotificationQueue.start_queue()
        for i in range(0, 600):
            typ = "type{}".format(i % 3 + 1)
            if random.random() < 0.5:
                # Random pauses interleave producer and consumer threads.
                time.sleep(0.002)
            NotificationQueue.new_notification(typ, i)
        NotificationQueue.stop()
        for i in range(0, 600):
            typ = i % 3 + 1
            if typ == 1:
                self.assertIn(i, self.listener.type1)
                self.assertIn(i, self.listener.type1_3)
            elif typ == 2:
                self.assertIn(i, self.listener.type2)
            elif typ == 3:
                self.assertIn(i, self.listener.type1_3)
            self.assertIn(i, self.listener.all)

        # Exactly one delivery per (handler, notification) pair: the
        # duplicate registrations in Listener.register() were ignored.
        self.assertEqual(len(self.listener.type1), 200)
        self.assertEqual(len(self.listener.type2), 200)
        self.assertEqual(len(self.listener.type1_3), 400)
        self.assertEqual(len(self.listener.all), 600)

    def test_deregister(self):
        NotificationQueue.start_queue()
        NotificationQueue.new_notification('type1', 1)
        NotificationQueue.new_notification('type3', 3)
        NotificationQueue.stop()
        self.assertEqual(self.listener.type1, [1])
        self.assertEqual(self.listener.type1_3, [1, 3])

        # Deregister log_type1_3 from 'type1' only: it must keep receiving
        # 'type3' notifications.
        NotificationQueue.start_queue()
        NotificationQueue.deregister(self.listener.log_type1_3, ['type1'])
        NotificationQueue.new_notification('type1', 4)
        NotificationQueue.new_notification('type3', 5)
        NotificationQueue.stop()
        self.assertEqual(self.listener.type1, [1, 4])
        self.assertEqual(self.listener.type1_3, [1, 3, 5])
diff --git a/src/pybind/mgr/dashboard/tests/test_orchestrator.py b/src/pybind/mgr/dashboard/tests/test_orchestrator.py
new file mode 100644
index 000000000..53e32c85a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_orchestrator.py
@@ -0,0 +1,40 @@
+import inspect
+import unittest
+from unittest import mock
+
+from orchestrator import Orchestrator as OrchestratorBase
+
+from ..controllers.orchestrator import Orchestrator
+from ..services.orchestrator import OrchFeature
+from ..tests import ControllerTestCase
+
+
class OrchestratorControllerTest(ControllerTestCase):
    """Tests for the orchestrator status UI endpoint."""

    URL_STATUS = '/ui-api/orchestrator/status'
    URL_INVENTORY = '/api/orchestrator/inventory'

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Orchestrator])

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_status_get(self, instance):
        """The endpoint relays OrchClient.status() verbatim as JSON."""
        expected_status = {'available': False, 'description': ''}

        orch_client = mock.Mock()
        orch_client.status.return_value = expected_status
        instance.return_value = orch_client

        self._get(self.URL_STATUS)
        self.assertStatus(200)
        self.assertJsonBody(expected_status)
+
+
class TestOrchestrator(unittest.TestCase):
    def test_features_has_corresponding_methods(self):
        """Every public OrchFeature constant must name a real Orchestrator method."""
        feature_values = [
            value for name, value in inspect.getmembers(
                OrchFeature, lambda member: not inspect.isroutine(member))
            if not name.startswith('_')
        ]
        base_method_names = {
            name for name, _ in inspect.getmembers(OrchestratorBase, inspect.isroutine)
        }
        for feature in feature_values:
            self.assertIn(feature, base_method_names)
diff --git a/src/pybind/mgr/dashboard/tests/test_osd.py b/src/pybind/mgr/dashboard/tests/test_osd.py
new file mode 100644
index 000000000..5d1b0c6be
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_osd.py
@@ -0,0 +1,434 @@
+# -*- coding: utf-8 -*-
+import uuid
+from contextlib import contextmanager
+from typing import Any, Dict, List, Optional
+from unittest import mock
+
+from ceph.deployment.drive_group import DeviceSelection, DriveGroupSpec # type: ignore
+from ceph.deployment.service_spec import PlacementSpec
+
+from .. import mgr
+from ..controllers.osd import Osd, OsdUi
+from ..services.osd import OsdDeploymentOptions
+from ..tests import ControllerTestCase
+from ..tools import NotificationQueue, TaskManager
+from .helper import update_dict # pylint: disable=import-error
+
+
class OsdHelper(object):
    """Builders for the mgr/osdmap mock payloads used by the OSD tests."""

    # OSD ids used when a caller does not supply its own list.
    DEFAULT_OSD_IDS = [0, 1, 2]

    @staticmethod
    def _gen_osdmap_tree_node(node_id: int, node_type: str, children: Optional[List[int]] = None,
                              update_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Return one CRUSH-tree node of the given type.

        ``children`` is mandatory for non-leaf types ('root'/'host');
        ``update_data`` is merged over the template via ``update_dict``.
        """
        assert node_type in ['root', 'host', 'osd']
        if node_type in ['root', 'host']:
            assert children is not None

        node_types = {
            'root': {
                'id': node_id,
                'name': 'default',
                'type': 'root',
                'type_id': 10,
                'children': children,
            },
            'host': {
                'id': node_id,
                'name': 'ceph-1',
                'type': 'host',
                'type_id': 1,
                'pool_weights': {},
                'children': children,
            },
            'osd': {
                'id': node_id,
                'device_class': 'hdd',
                'type': 'osd',
                'type_id': 0,
                'crush_weight': 0.009796142578125,
                'depth': 2,
                'pool_weights': {},
                'exists': 1,
                'status': 'up',
                'reweight': 1.0,
                'primary_affinity': 1.0,
                'name': 'osd.{}'.format(node_id),
            }
        }
        node = node_types[node_type]

        return update_dict(node, update_data) if update_data else node

    @staticmethod
    def _gen_osd_stats(osd_id: int, update_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Return a plausible `osd_stats` entry for one OSD."""
        stats = {
            'osd': osd_id,
            'up_from': 11,
            'seq': 47244640581,
            'num_pgs': 50,
            'kb': 10551288,
            'kb_used': 1119736,
            'kb_used_data': 5504,
            'kb_used_omap': 0,
            'kb_used_meta': 1048576,
            'kb_avail': 9431552,
            'statfs': {
                'total': 10804518912,
                'available': 9657909248,
                'internally_reserved': 1073741824,
                'allocated': 5636096,
                'data_stored': 102508,
                'data_compressed': 0,
                'data_compressed_allocated': 0,
                'data_compressed_original': 0,
                'omap_allocated': 0,
                'internal_metadata': 1073741824
            },
            'hb_peers': [0, 1],
            'snap_trim_queue_len': 0,
            'num_snap_trimming': 0,
            'op_queue_age_hist': {
                'histogram': [],
                'upper_bound': 1
            },
            'perf_stat': {
                'commit_latency_ms': 0.0,
                'apply_latency_ms': 0.0,
                'commit_latency_ns': 0,
                'apply_latency_ns': 0
            },
            'alerts': [],
        }
        # Same conditional form as _gen_osdmap_tree_node for consistency.
        return update_dict(stats, update_data) if update_data else stats

    @staticmethod
    def _gen_osd_map_osd(osd_id: int) -> Dict[str, Any]:
        """Return one osdmap entry; the uuid is freshly generated per call."""
        return {
            'osd': osd_id,
            'up': 1,
            'in': 1,
            'weight': 1.0,
            'primary_affinity': 1.0,
            'last_clean_begin': 0,
            'last_clean_end': 0,
            'up_from': 5,
            'up_thru': 21,
            'down_at': 0,
            'lost_at': 0,
            'public_addrs': {
                'addrvec': [{
                    'type': 'v2',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6802'
                }, {
                    'type': 'v1',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6803'
                }]
            },
            'cluster_addrs': {
                'addrvec': [{
                    'type': 'v2',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6804'
                }, {
                    'type': 'v1',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6805'
                }]
            },
            'heartbeat_back_addrs': {
                'addrvec': [{
                    'type': 'v2',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6808'
                }, {
                    'type': 'v1',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6809'
                }]
            },
            'heartbeat_front_addrs': {
                'addrvec': [{
                    'type': 'v2',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6806'
                }, {
                    'type': 'v1',
                    'nonce': 1302,
                    'addr': '172.23.0.2:6807'
                }]
            },
            'state': ['exists', 'up'],
            'uuid': str(uuid.uuid4()),
            'public_addr': '172.23.0.2:6803/1302',
            'cluster_addr': '172.23.0.2:6805/1302',
            'heartbeat_back_addr': '172.23.0.2:6809/1302',
            'heartbeat_front_addr': '172.23.0.2:6807/1302',
            'id': osd_id,
        }

    @classmethod
    def gen_osdmap(cls, ids: Optional[List[int]] = None) -> Dict[str, Any]:
        """Return an osdmap dict keyed by stringified OSD id."""
        return {str(i): cls._gen_osd_map_osd(i) for i in ids or cls.DEFAULT_OSD_IDS}

    @classmethod
    def gen_osd_stats(cls, ids: Optional[List[int]] = None) -> List[Dict[str, Any]]:
        """Return one stats entry per OSD id."""
        return [cls._gen_osd_stats(i) for i in ids or cls.DEFAULT_OSD_IDS]

    @classmethod
    def gen_osdmap_tree_nodes(cls, ids: Optional[List[int]] = None) -> List[Dict[str, Any]]:
        """Return a minimal CRUSH tree: one root, one host, given OSD leaves."""
        return [
            cls._gen_osdmap_tree_node(-1, 'root', [-3]),
            cls._gen_osdmap_tree_node(-3, 'host', ids or cls.DEFAULT_OSD_IDS),
        ] + [cls._gen_osdmap_tree_node(node_id, 'osd') for node_id in ids or cls.DEFAULT_OSD_IDS]

    @classmethod
    def gen_mgr_get_counter(cls) -> List[List[int]]:
        """Return a fixed (timestamp, value) perf-counter series."""
        return [[1551973855, 35], [1551973860, 35], [1551973865, 35], [1551973870, 35]]

    @staticmethod
    def mock_inventory_host(orch_client_mock, devices_data: Dict[str, str]) -> None:
        """Wire ``orch_client_mock.inventory.list`` to return the given devices.

        ``devices_data`` is a list of dicts with 'host', 'type' and 'path'
        keys; one inventory entry is produced per distinct host.
        """
        class MockDevice:
            def __init__(self, human_readable_type, path, available=True):
                self.human_readable_type = human_readable_type
                self.available = available
                self.path = path

        # Fixed typo in the original name ('invetory' -> 'inventory').
        def create_inventory_host(host, devices_data):
            """Build one mock inventory entry holding the host's devices."""
            inventory_host = mock.Mock()
            inventory_host.devices.devices = []
            for data in devices_data:
                if data['host'] != host:
                    continue
                inventory_host.devices.devices.append(MockDevice(data['type'], data['path']))
            return inventory_host

        hosts = {device['host'] for device in devices_data}

        inventory = [create_inventory_host(host, devices_data) for host in hosts]
        orch_client_mock.inventory.list.return_value = inventory
+
+
class OsdTest(ControllerTestCase):
    """Tests for the /api/osd and /ui-api/osd endpoints."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Osd, OsdUi])
        # The task framework is needed because OSD creation runs as a task.
        NotificationQueue.start_queue()
        TaskManager.init()

    @classmethod
    def tearDownClass(cls):
        NotificationQueue.stop()

    @contextmanager
    def _mock_osd_list(self, osd_stat_ids, osdmap_tree_node_ids, osdmap_ids):
        """Patch every mgr entry point the OSD listing touches.

        The three id lists can diverge on purpose, to emulate clusters where
        some OSDs exist only in the CRUSH map or only in the osdmap.
        """
        def mgr_get_replacement(*args, **kwargs):
            method = args[0] or kwargs['method']
            if method == 'osd_stats':
                return {'osd_stats': OsdHelper.gen_osd_stats(osd_stat_ids)}
            if method == 'osd_map_tree':
                return {'nodes': OsdHelper.gen_osdmap_tree_nodes(osdmap_tree_node_ids)}
            raise NotImplementedError()

        def mgr_get_counter_replacement(svc_type, _, path):
            if svc_type == 'osd':
                return {path: OsdHelper.gen_mgr_get_counter()}
            raise NotImplementedError()

        with mock.patch.object(Osd, 'get_osd_map', return_value=OsdHelper.gen_osdmap(osdmap_ids)):
            with mock.patch.object(mgr, 'get', side_effect=mgr_get_replacement):
                with mock.patch.object(mgr, 'get_counter', side_effect=mgr_get_counter_replacement):
                    with mock.patch.object(mgr, 'get_latest', return_value=1146609664):
                        with mock.patch.object(Osd, 'get_removing_osds', return_value=[]):
                            yield

    def _get_drive_group_data(self, service_id='all_hdd', host_pattern_k='host_pattern',
                              host_pattern_v='*'):
        """Return a drive-group creation payload; the placement key/value is
        parameterized so tests can send an invalid one."""
        return {
            'method': 'drive_groups',
            'data': [
                {
                    'service_type': 'osd',
                    'service_id': service_id,
                    'data_devices': {
                        'rotational': True
                    },
                    host_pattern_k: host_pattern_v
                }
            ],
            'tracking_id': 'all_hdd, b_ssd'
        }

    def test_osd_list_aggregation(self):
        """
        This test emulates the state of a cluster where an OSD has only been
        removed (with e.g. `ceph osd rm`), but it hasn't been removed from the
        CRUSH map. Ceph reports a health warning alongside a `1 osds exist in
        the crush map but not in the osdmap` warning in such a case.
        """
        osds_actual = [0, 1]
        osds_leftover = [0, 1, 2]
        with self._mock_osd_list(osd_stat_ids=osds_actual, osdmap_tree_node_ids=osds_leftover,
                                 osdmap_ids=osds_actual):
            self._get('/api/osd')
            self.assertEqual(len(self.json_body()), 2, 'It should display two OSDs without failure')
            self.assertStatus(200)

    @mock.patch('dashboard.controllers.osd.CephService')
    def test_osd_create_bare(self, ceph_service):
        ceph_service.send_command.return_value = '5'
        sample_data = {
            'uuid': 'f860ca2e-757d-48ce-b74a-87052cad563f',
            'svc_id': 5
        }

        data = {
            'method': 'bare',
            'data': sample_data,
            'tracking_id': 'bare-5'
        }
        self._task_post('/api/osd', data)
        self.assertStatus(201)
        ceph_service.send_command.assert_called()

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_osd_create_with_drive_groups(self, instance):
        # without orchestrator service
        fake_client = mock.Mock()
        instance.return_value = fake_client

        # Valid DriveGroup
        data = self._get_drive_group_data()

        # Without orchestrator service
        fake_client.available.return_value = False
        self._task_post('/api/osd', data)
        self.assertStatus(503)

        # With orchestrator service
        fake_client.available.return_value = True
        fake_client.get_missing_features.return_value = []
        self._task_post('/api/osd', data)
        self.assertStatus(201)
        # The JSON payload must be translated into an equivalent DriveGroupSpec.
        dg_specs = [DriveGroupSpec(placement=PlacementSpec(host_pattern='*'),
                                   service_id='all_hdd',
                                   service_type='osd',
                                   data_devices=DeviceSelection(rotational=True))]
        fake_client.osds.create.assert_called_with(dg_specs)

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_osd_create_with_invalid_drive_groups(self, instance):
        # without orchestrator service
        fake_client = mock.Mock()
        instance.return_value = fake_client
        fake_client.get_missing_features.return_value = []

        # Invalid DriveGroup
        data = self._get_drive_group_data('invalid_dg', 'host_pattern_wrong', 'unknown')
        self._task_post('/api/osd', data)
        self.assertStatus(400)

    @mock.patch('dashboard.controllers.osd.CephService')
    def test_osd_mark_all_actions(self, instance):
        # NOTE(review): fake_client is never wired to anything here; the
        # class-level patch alone keeps CephService from hitting the cluster.
        fake_client = mock.Mock()
        instance.return_value = fake_client
        action_list = ['OUT', 'IN', 'DOWN']
        for action in action_list:
            data = {'action': action}
            self._task_put('/api/osd/1/mark', data)
            self.assertStatus(200)

    def _get_deployment_options(self, fake_client, devices_data: Dict[str, str]) -> Dict[str, Any]:
        """Install the device inventory mock and fetch the deployment options."""
        OsdHelper.mock_inventory_host(fake_client, devices_data)
        self._get('/ui-api/osd/deployment_options')
        self.assertStatus(200)
        res = self.json_body()
        return res

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_deployment_options(self, instance):
        fake_client = mock.Mock()
        instance.return_value = fake_client
        fake_client.get_missing_features.return_value = []

        # All-HDD inventory: cost/capacity must be available and recommended.
        devices_data = [
            {'type': 'hdd', 'path': '/dev/sda', 'host': 'host1'},
            {'type': 'hdd', 'path': '/dev/sdc', 'host': 'host1'},
            {'type': 'hdd', 'path': '/dev/sdb', 'host': 'host2'},
            {'type': 'hdd', 'path': '/dev/sde', 'host': 'host1'},
            {'type': 'hdd', 'path': '/dev/sdd', 'host': 'host2'},
        ]

        res = self._get_deployment_options(fake_client, devices_data)
        self.assertTrue(res['options'][OsdDeploymentOptions.COST_CAPACITY]['available'])
        assert res['recommended_option'] == OsdDeploymentOptions.COST_CAPACITY

        # we don't want cost_capacity enabled without hdds
        for data in devices_data:
            data['type'] = 'ssd'

        res = self._get_deployment_options(fake_client, devices_data)
        self.assertFalse(res['options'][OsdDeploymentOptions.COST_CAPACITY]['available'])
        self.assertFalse(res['options'][OsdDeploymentOptions.THROUGHPUT]['available'])
        self.assertEqual(res['recommended_option'], None)

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_deployment_options_throughput(self, instance):
        fake_client = mock.Mock()
        instance.return_value = fake_client
        fake_client.get_missing_features.return_value = []

        # Mixed SSD (non-NVMe) + HDD inventory: throughput mode is preferred.
        devices_data = [
            {'type': 'ssd', 'path': '/dev/sda', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/sdc', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/sdb', 'host': 'host2'},
            {'type': 'hdd', 'path': '/dev/sde', 'host': 'host1'},
            {'type': 'hdd', 'path': '/dev/sdd', 'host': 'host2'},
        ]

        res = self._get_deployment_options(fake_client, devices_data)
        self.assertTrue(res['options'][OsdDeploymentOptions.COST_CAPACITY]['available'])
        self.assertTrue(res['options'][OsdDeploymentOptions.THROUGHPUT]['available'])
        self.assertFalse(res['options'][OsdDeploymentOptions.IOPS]['available'])
        assert res['recommended_option'] == OsdDeploymentOptions.THROUGHPUT

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_deployment_options_with_hdds_and_nvmes(self, instance):
        fake_client = mock.Mock()
        instance.return_value = fake_client
        fake_client.get_missing_features.return_value = []

        # NVMe SSDs (detected via device path) + HDDs: IOPS becomes available
        # but cost/capacity stays the recommendation.
        devices_data = [
            {'type': 'ssd', 'path': '/dev/nvme01', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/nvme02', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/nvme03', 'host': 'host2'},
            {'type': 'hdd', 'path': '/dev/sde', 'host': 'host1'},
            {'type': 'hdd', 'path': '/dev/sdd', 'host': 'host2'},
        ]

        res = self._get_deployment_options(fake_client, devices_data)
        self.assertTrue(res['options'][OsdDeploymentOptions.COST_CAPACITY]['available'])
        self.assertFalse(res['options'][OsdDeploymentOptions.THROUGHPUT]['available'])
        self.assertTrue(res['options'][OsdDeploymentOptions.IOPS]['available'])
        assert res['recommended_option'] == OsdDeploymentOptions.COST_CAPACITY

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_deployment_options_iops(self, instance):
        fake_client = mock.Mock()
        instance.return_value = fake_client
        fake_client.get_missing_features.return_value = []

        # NVMe-only inventory: only the IOPS option is available.
        devices_data = [
            {'type': 'ssd', 'path': '/dev/nvme01', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/nvme02', 'host': 'host1'},
            {'type': 'ssd', 'path': '/dev/nvme03', 'host': 'host2'}
        ]

        res = self._get_deployment_options(fake_client, devices_data)
        self.assertFalse(res['options'][OsdDeploymentOptions.COST_CAPACITY]['available'])
        self.assertFalse(res['options'][OsdDeploymentOptions.THROUGHPUT]['available'])
        self.assertTrue(res['options'][OsdDeploymentOptions.IOPS]['available'])
diff --git a/src/pybind/mgr/dashboard/tests/test_plugin_debug.py b/src/pybind/mgr/dashboard/tests/test_plugin_debug.py
new file mode 100644
index 000000000..572f82a41
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_plugin_debug.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from ..tests import CLICommandTestMixin, ControllerTestCase
+
+
class TestPluginDebug(ControllerTestCase, CLICommandTestMixin):
    """Tests for the 'debug' CLI plugin toggling verbose error pages."""

    @classmethod
    def setup_server(cls):
        # pylint: disable=protected-access
        cls.setup_controllers([])

    def setUp(self):
        self.mock_kv_store()

    def _request_unknown_controller(self):
        """Hit a non-existent endpoint and return the parsed 404 body."""
        self._get('/api/unexisting_controller')
        self.assertStatus(404)
        body = self.json_body()
        self.assertGreater(len(body), 0)
        return body

    def test_debug_disabled(self):
        self.exec_cmd('debug', action='disable')
        data = self._request_unknown_controller()
        # With debugging off, internals must not leak into the error page.
        self.assertNotIn('traceback', data)
        self.assertNotIn('version', data)
        self.assertIn('request_id', data)

    def test_debug_enabled(self):
        self.exec_cmd('debug', action='enable')
        data = self._request_unknown_controller()
        self.assertIn('traceback', data)
        self.assertIn('version', data)
        self.assertIn('request_id', data)
diff --git a/src/pybind/mgr/dashboard/tests/test_pool.py b/src/pybind/mgr/dashboard/tests/test_pool.py
new file mode 100644
index 000000000..02e2b641c
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_pool.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=protected-access
+import time
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from ..controllers.pool import Pool
+from ..controllers.task import Task
+from ..tests import ControllerTestCase
+from ..tools import NotificationQueue, TaskManager
+
+
class MockTask(object):
    """Minimal stand-in for a TaskManager task.

    The previous version declared ``percentages`` as a *class* attribute, so
    recorded progress values were shared across every instance (and leaked
    between tests). Keep the list per-instance instead.
    """

    def __init__(self):
        # Progress values recorded by set_progress(), in call order.
        self.percentages = []

    def set_progress(self, percentage):
        """Record a progress percentage."""
        self.percentages.append(percentage)
+
+
class PoolControllerTest(ControllerTestCase):
    """Tests for pool creation and the PG-count settling helper."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Pool, Task])

    @mock.patch('dashboard.services.progress.get_progress_tasks')
    @mock.patch('dashboard.controllers.pool.Pool._get')
    @mock.patch('dashboard.services.ceph_service.CephService.send_command')
    def test_creation(self, send_command, _get, get_progress_tasks):
        # Two polls of the pool: first still converging (pg targets at 63),
        # second fully settled — _wait_for_pgs should stop after the second.
        _get.side_effect = [{
            'pool_name': 'test-pool',
            'pg_num': 64,
            'pg_num_target': 63,
            'pg_placement_num': 64,
            'pg_placement_num_target': 63
        }, {
            'pool_name': 'test-pool',
            'pg_num': 64,
            'pg_num_target': 64,
            'pg_placement_num': 64,
            'pg_placement_num_target': 64
        }]
        NotificationQueue.start_queue()
        TaskManager.init()

        def _send_cmd(*args, **kwargs):  # pylint: disable=unused-argument
            # Simulate a slow mon command so the creation runs as a
            # long-lived task rather than finishing synchronously.
            time.sleep(3)

        send_command.side_effect = _send_cmd
        get_progress_tasks.return_value = [], []

        # Third argument: wait up to 10s for the task to finish.
        self._task_post('/api/pool', {
            'pool': 'test-pool',
            'pool_type': 1,
            'pg_num': 64
        }, 10)
        self.assertStatus(201)
        self.assertEqual(_get.call_count, 2)
        NotificationQueue.stop()

    @mock.patch('dashboard.controllers.pool.Pool._get')
    def test_wait_for_pgs_without_waiting(self, _get):
        # Pool already settled: a single poll must be enough.
        _get.side_effect = [{
            'pool_name': 'test-pool',
            'pg_num': 32,
            'pg_num_target': 32,
            'pg_placement_num': 32,
            'pg_placement_num_target': 32
        }]
        Pool._wait_for_pgs('test-pool')
        self.assertEqual(_get.call_count, 1)

    @mock.patch('dashboard.controllers.pool.Pool._get')
    def test_wait_for_pgs_with_waiting(self, _get):
        # Substitute the current task so the progress percentages reported
        # while the PGs converge can be captured and asserted on.
        task = MockTask()
        orig_method = TaskManager.current_task
        TaskManager.current_task = mock.MagicMock()
        TaskManager.current_task.return_value = task
        # Six polls converging from 64/64 PGs down to the 32/32 target.
        _get.side_effect = [{
            'pool_name': 'test-pool',
            'pg_num': 64,
            'pg_num_target': 32,
            'pg_placement_num': 64,
            'pg_placement_num_target': 64
        }, {
            'pool_name': 'test-pool',
            'pg_num': 63,
            'pg_num_target': 32,
            'pg_placement_num': 62,
            'pg_placement_num_target': 32
        }, {
            'pool_name': 'test-pool',
            'pg_num': 48,
            'pg_num_target': 32,
            'pg_placement_num': 48,
            'pg_placement_num_target': 32
        }, {
            'pool_name': 'test-pool',
            'pg_num': 48,
            'pg_num_target': 32,
            'pg_placement_num': 33,
            'pg_placement_num_target': 32
        }, {
            'pool_name': 'test-pool',
            'pg_num': 33,
            'pg_num_target': 32,
            'pg_placement_num': 32,
            'pg_placement_num_target': 32
        }, {
            'pool_name': 'test-pool',
            'pg_num': 32,
            'pg_num_target': 32,
            'pg_placement_num': 32,
            'pg_placement_num_target': 32
        }]
        Pool._wait_for_pgs('test-pool')
        self.assertEqual(_get.call_count, 6)
        # One progress update per poll that still showed a delta.
        self.assertEqual(task.percentages, [0, 5, 50, 73, 98])
        # Restore the real TaskManager hook for subsequent tests.
        TaskManager.current_task = orig_method
diff --git a/src/pybind/mgr/dashboard/tests/test_prometheus.py b/src/pybind/mgr/dashboard/tests/test_prometheus.py
new file mode 100644
index 000000000..cd2fb3e8d
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_prometheus.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=protected-access
+try:
+ from mock import patch
+except ImportError:
+ from unittest.mock import patch
+
+from .. import mgr
+from ..controllers.prometheus import Prometheus, PrometheusNotifications, PrometheusReceiver
+from ..tests import ControllerTestCase
+
+
class PrometheusControllerTest(ControllerTestCase):
    """Exercise the Prometheus/Alertmanager proxy endpoints and the
    notification receiver controller."""

    # Mocked Alertmanager base URL that the dashboard should proxy to.
    alert_host = 'http://alertmanager:9093/mock'
    alert_host_api = alert_host + '/api/v1'

    # Mocked Prometheus base URL.
    prometheus_host = 'http://prometheus:9090/mock'
    prometheus_host_api = prometheus_host + '/api/v1'

    @classmethod
    def setup_server(cls):
        # Route module-option lookups to the mocked hosts above.
        settings = {
            'ALERTMANAGER_API_HOST': cls.alert_host,
            'PROMETHEUS_API_HOST': cls.prometheus_host
        }
        mgr.get_module_option.side_effect = settings.get
        cls.setup_controllers([Prometheus, PrometheusNotifications, PrometheusReceiver])

    def test_rules(self):
        # Rules come from Prometheus, not Alertmanager.
        with patch('requests.request') as mock_request:
            self._get('/api/prometheus/rules')
            mock_request.assert_called_with('GET', self.prometheus_host_api + '/rules',
                                            json=None, params={}, verify=True)

    def test_list(self):
        with patch('requests.request') as mock_request:
            self._get('/api/prometheus')
            mock_request.assert_called_with('GET', self.alert_host_api + '/alerts',
                                            json=None, params={}, verify=True)

    def test_get_silences(self):
        with patch('requests.request') as mock_request:
            self._get('/api/prometheus/silences')
            mock_request.assert_called_with('GET', self.alert_host_api + '/silences',
                                            json=None, params={}, verify=True)

    def test_add_silence(self):
        with patch('requests.request') as mock_request:
            self._post('/api/prometheus/silence', {'id': 'new-silence'})
            mock_request.assert_called_with('POST', self.alert_host_api + '/silences',
                                            params=None, json={'id': 'new-silence'},
                                            verify=True)

    def test_update_silence(self):
        # Updating reuses the same POST endpoint as creating a silence.
        with patch('requests.request') as mock_request:
            self._post('/api/prometheus/silence', {'id': 'update-silence'})
            mock_request.assert_called_with('POST', self.alert_host_api + '/silences',
                                            params=None, json={'id': 'update-silence'},
                                            verify=True)

    def test_expire_silence(self):
        with patch('requests.request') as mock_request:
            self._delete('/api/prometheus/silence/0')
            mock_request.assert_called_with('DELETE', self.alert_host_api + '/silence/0',
                                            json=None, params=None, verify=True)

    def test_silences_empty_delete(self):
        # DELETE without a silence id must not hit Alertmanager at all.
        with patch('requests.request') as mock_request:
            self._delete('/api/prometheus/silence')
            mock_request.assert_not_called()

    def test_post_on_receiver(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        self.assertEqual(len(PrometheusReceiver.notifications), 1)
        notification = PrometheusReceiver.notifications[0]
        self.assertEqual(notification['name'], 'foo')
        # The receiver stamps each notification with a 'notified' timestamp.
        self.assertTrue(len(notification['notified']) > 20)

    def test_get_empty_list_with_no_notifications(self):
        PrometheusReceiver.notifications = []
        self._get('/api/prometheus/notifications')
        self.assertStatus(200)
        self.assertJsonBody([])
        self._get('/api/prometheus/notifications?from=last')
        self.assertStatus(200)
        self.assertJsonBody([])

    def test_get_all_notification(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        self._post('/api/prometheus_receiver', {'name': 'bar'})
        self._get('/api/prometheus/notifications')
        self.assertStatus(200)
        self.assertJsonBody(PrometheusReceiver.notifications)

    def test_get_last_notification_with_use_of_last_keyword(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        self._post('/api/prometheus_receiver', {'name': 'bar'})
        self._get('/api/prometheus/notifications?from=last')
        self.assertStatus(200)
        last = PrometheusReceiver.notifications[1]
        self.assertJsonBody([last])

    def test_get_no_notification_with_unknown_id(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        self._post('/api/prometheus_receiver', {'name': 'bar'})
        self._get('/api/prometheus/notifications?from=42')
        self.assertStatus(200)
        self.assertJsonBody([])

    def test_get_no_notification_since_with_last_notification(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        notification = PrometheusReceiver.notifications[0]
        self._get('/api/prometheus/notifications?from=' + notification['id'])
        self.assertStatus(200)
        self.assertJsonBody([])

    def test_get_notifications_since_last_notification(self):
        PrometheusReceiver.notifications = []
        self._post('/api/prometheus_receiver', {'name': 'foobar'})
        next_to_last = PrometheusReceiver.notifications[0]
        self._post('/api/prometheus_receiver', {'name': 'foo'})
        self._post('/api/prometheus_receiver', {'name': 'bar'})
        self._get('/api/prometheus/notifications?from=' + next_to_last['id'])
        forelast = PrometheusReceiver.notifications[1]
        last = PrometheusReceiver.notifications[2]
        self.assertEqual(self.json_body(), [forelast, last])
diff --git a/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py b/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py
new file mode 100644
index 000000000..69e3f11e2
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py
@@ -0,0 +1,195 @@
+from __future__ import absolute_import
+
+import json
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from .. import mgr
+from ..controllers.orchestrator import Orchestrator
+from ..controllers.rbd_mirroring import RbdMirroring, \
+ RbdMirroringPoolBootstrap, RbdMirroringStatus, RbdMirroringSummary
+from ..controllers.summary import Summary
+from ..services import progress
+from ..tests import ControllerTestCase
+
# --- Fixtures shared by the rbd-mirroring controller tests below ---

# A single host running one rbd-mirror daemon (service id 3).
mock_list_servers = [{
    'hostname': 'ceph-host',
    'services': [{'id': 3, 'type': 'rbd-mirror'}]
}]

# Daemon metadata as returned by mgr.get_metadata.
mock_get_metadata = {
    'id': 1,
    'instance_id': 3,
    'ceph_version': 'ceph version 13.0.0-5719 mimic (dev)'
}

# Per-pool mirror status reported by daemon id 1.
_status = {
    1: {
        'callouts': {},
        'image_local_count': 5,
        'image_remote_count': 6,
        'image_error_count': 7,
        'image_warning_count': 8,
        'name': 'pool_name'
    }
}

# mgr.get_daemon_status delivers the status JSON-encoded under 'json'.
mock_get_daemon_status = {
    'json': json.dumps(_status)
}

# Minimal osd map: one pool with the 'rbd' application enabled.
mock_osd_map = {
    'pools': [{
        'pool_name': 'rbd',
        'application_metadata': {'rbd'}
    }]
}
+
+
class RbdMirroringControllerTest(ControllerTestCase):
    """Tests for the /api/block/mirroring site-name endpoints."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([RbdMirroring])

    @mock.patch('dashboard.controllers.rbd_mirroring.rbd.RBD')
    def test_site_name(self, mock_rbd):
        """GET returns the current site name; PUT stores and echoes a new one."""
        result = {'site_name': 'fsid'}
        mock_rbd_instance = mock_rbd.return_value
        mock_rbd_instance.mirror_site_name_get.return_value = \
            result['site_name']

        self._get('/api/block/mirroring/site_name')
        self.assertStatus(200)
        self.assertJsonBody(result)

        # The PUT response reflects the value re-read via mirror_site_name_get,
        # so the mock must be updated to the new name as well.
        result['site_name'] = 'site-a'
        mock_rbd_instance.mirror_site_name_get.return_value = \
            result['site_name']
        self._put('/api/block/mirroring/site_name', result)
        self.assertStatus(200)
        self.assertJsonBody(result)
        mock_rbd_instance.mirror_site_name_set.assert_called_with(
            mock.ANY, result['site_name'])
+
+
class RbdMirroringPoolBootstrapControllerTest(ControllerTestCase):
    """Tests for the mirroring peer-bootstrap token/peer endpoints."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([RbdMirroringPoolBootstrap])

    @mock.patch('dashboard.controllers.rbd_mirroring.rbd.RBD')
    def test_token(self, mock_rbd):
        """Creating a bootstrap token opens the pool's ioctx and returns the
        token produced by librbd."""
        mock_rbd_instance = mock_rbd.return_value
        mock_rbd_instance.mirror_peer_bootstrap_create.return_value = "1234"

        self._post('/api/block/mirroring/pool/abc/bootstrap/token')
        self.assertStatus(200)
        self.assertJsonBody({"token": "1234"})
        mgr.rados.open_ioctx.assert_called_with("abc")

        mock_rbd_instance.mirror_peer_bootstrap_create.assert_called()

    @mock.patch('dashboard.controllers.rbd_mirroring.rbd')
    def test_peer(self, mock_rbd_module):
        """Importing a peer rejects an unknown direction (500) and accepts a
        valid one, forwarding the matching rbd direction constant."""
        mock_rbd_instance = mock_rbd_module.RBD.return_value

        values = {
            "direction": "invalid",
            "token": "1234"
        }
        self._post('/api/block/mirroring/pool/abc/bootstrap/peer', values)
        self.assertStatus(500)
        mgr.rados.open_ioctx.assert_called_with("abc")

        values["direction"] = "rx"
        self._post('/api/block/mirroring/pool/abc/bootstrap/peer', values)
        self.assertStatus(200)
        self.assertJsonBody({})
        mgr.rados.open_ioctx.assert_called_with("abc")

        mock_rbd_instance.mirror_peer_bootstrap_import.assert_called_with(
            mock.ANY, mock_rbd_module.RBD_MIRROR_PEER_DIRECTION_RX, '1234')
+
+
class RbdMirroringSummaryControllerTest(ControllerTestCase):
    """Tests for the mirroring summary endpoint and its contribution to the
    global dashboard summary."""

    @classmethod
    def setup_server(cls):
        # Wire the mgr mock with the module-level fixtures so the summary
        # code sees one rbd-mirror daemon and one rbd pool.
        mgr.list_servers.return_value = mock_list_servers
        mgr.get_metadata = mock.Mock(return_value=mock_get_metadata)
        mgr.get_daemon_status.return_value = mock_get_daemon_status
        # mgr.get is dispatched by map name; only the keys queried by the
        # controllers under test are provided.
        mgr.get.side_effect = lambda key: {
            'osd_map': mock_osd_map,
            'health': {'json': '{"status": 1}'},
            'fs_map': {'filesystems': []},
            'mgr_map': {
                'services': {
                    'dashboard': 'https://ceph.dev:11000/'
                },
            }
        }[key]
        mgr.url_prefix = ''
        mgr.get_mgr_id.return_value = 0
        mgr.have_mon_connection.return_value = True
        mgr.version = 'ceph version 13.1.0-534-g23d3751b89 ' \
                      '(23d3751b897b31d2bda57aeaf01acb5ff3c4a9cd) ' \
                      'nautilus (dev)'

        progress.get_progress_tasks = mock.MagicMock()
        progress.get_progress_tasks.return_value = ([], [])

        cls.setup_controllers([RbdMirroringSummary, Summary], '/test')

    @mock.patch('dashboard.controllers.rbd_mirroring.rbd.RBD')
    def test_default(self, mock_rbd):
        mock_rbd_instance = mock_rbd.return_value
        mock_rbd_instance.mirror_site_name_get.return_value = 'site-a'

        self._get('/test/api/block/mirroring/summary')
        result = self.json_body()
        self.assertStatus(200)
        self.assertEqual(result['site_name'], 'site-a')
        self.assertEqual(result['status'], 0)
        for k in ['daemons', 'pools', 'image_error', 'image_syncing', 'image_ready']:
            self.assertIn(k, result['content_data'])

    @mock.patch('dashboard.controllers.BaseController._has_permissions')
    @mock.patch('dashboard.controllers.rbd_mirroring.rbd.RBD')
    def test_summary(self, mock_rbd, has_perms_mock):
        """We're also testing `summary`, as it also uses code from `rbd_mirroring.py`"""
        mock_rbd_instance = mock_rbd.return_value
        mock_rbd_instance.mirror_site_name_get.return_value = 'site-a'

        has_perms_mock.return_value = True
        self._get('/test/api/summary')
        self.assertStatus(200)

        summary = self.json_body()['rbd_mirroring']
        self.assertEqual(summary, {'errors': 0, 'warnings': 1})
+
+
class RbdMirroringStatusControllerTest(ControllerTestCase):
    """Tests for the mirroring status/configure UI endpoints."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([RbdMirroringStatus, Orchestrator])

    @mock.patch('dashboard.controllers.orchestrator.OrchClient.instance')
    def test_status(self, instance):
        # NOTE(review): the orchestrator reports unavailable here, yet the
        # endpoint still answers {'available': True} -- the mirroring status
        # apparently does not depend on orchestrator availability; confirm
        # against the controller implementation.
        status = {'available': False, 'description': ''}
        fake_client = mock.Mock()
        fake_client.status.return_value = status
        instance.return_value = fake_client

        self._get('/ui-api/block/mirroring/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': True, 'message': None})

    def test_configure(self):
        self._post('/ui-api/block/mirroring/configure')
        self.assertStatus(200)
diff --git a/src/pybind/mgr/dashboard/tests/test_rbd_service.py b/src/pybind/mgr/dashboard/tests/test_rbd_service.py
new file mode 100644
index 000000000..ad1825300
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rbd_service.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import unittest
+from datetime import datetime
+from unittest.mock import MagicMock
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from .. import mgr
+from ..services.rbd import RbdConfiguration, RBDSchedulerInterval, RbdService, \
+ get_image_spec, parse_image_spec
+
+
class ImageNotFoundStub(Exception):
    """Stand-in for ``rbd.ImageNotFound`` so the tests do not require the
    real rbd binding; mirrors its ``(message, errno)`` argument shape."""

    def __init__(self, message, errno=None):
        formatted = 'RBD image not found (%s)' % message
        super().__init__(formatted, errno)
+
+
class RbdServiceTest(unittest.TestCase):
    """Unit tests for the RbdService/RbdConfiguration helpers."""

    def setUp(self):
        # pylint: disable=protected-access
        # Replace the shared librbd instance with a mock for every test.
        RbdService._rbd_inst = mock.Mock()
        self.rbd_inst_mock = RbdService._rbd_inst

    def test_compose_image_spec(self):
        self.assertEqual(get_image_spec('mypool', 'myns', 'myimage'), 'mypool/myns/myimage')
        self.assertEqual(get_image_spec('mypool', None, 'myimage'), 'mypool/myimage')

    def test_parse_image_spec(self):
        self.assertEqual(parse_image_spec('mypool/myns/myimage'), ('mypool', 'myns', 'myimage'))
        self.assertEqual(parse_image_spec('mypool/myimage'), ('mypool', None, 'myimage'))

    @mock.patch('dashboard.services.rbd.RbdConfiguration._rbd.config_list')
    @mock.patch('dashboard.mgr.get')
    @mock.patch('dashboard.services.ceph_service.CephService.get_pool_list')
    def test_pool_rbd_configuration_with_different_pg_states(self, get_pool_list, get, config_list):
        """Configuration listing is skipped for pools whose PGs are not
        active+clean and returned normally otherwise."""
        get_pool_list.return_value = [{
            'pool_name': 'good-pool',
            'pool': 1,
        }, {
            'pool_name': 'bad-pool',
            'pool': 2,
        }]
        get.return_value = {
            'by_pool': {
                '1': {'active+clean': 32},
                '2': {'creating+incomplete': 32},
            }
        }
        config_list.return_value = [1, 2, 3]
        config = RbdConfiguration('bad-pool')
        self.assertEqual(config.list(), [])
        config = RbdConfiguration('good-pool')
        self.assertEqual(config.list(), [1, 2, 3])

    def test_rbd_image_stat_removing(self):
        """A trash entry in REMOVING state is converted into an image stat
        dict with ISO-8601 'Z'-suffixed timestamps."""
        time = datetime.utcnow()
        self.rbd_inst_mock.trash_get.return_value = {
            'id': '3c1a5ee60a88',
            'name': 'test_rbd',
            'source': 'REMOVING',
            'deletion_time': time,
            'deferment_end_time': time
        }

        ioctx_mock = MagicMock()

        # pylint: disable=protected-access
        rbd = RbdService._rbd_image_stat_removing(ioctx_mock, 'test_pool', '', '3c1a5ee60a88')
        self.assertEqual(rbd, {
            'id': '3c1a5ee60a88',
            'unique_id': 'test_pool/3c1a5ee60a88',
            'name': 'test_rbd',
            'source': 'REMOVING',
            'deletion_time': '{}Z'.format(time.isoformat()),
            'deferment_end_time': '{}Z'.format(time.isoformat()),
            'pool_name': 'test_pool',
            'namespace': ''
        })

    @mock.patch('dashboard.services.rbd.rbd.ImageNotFound', new_callable=lambda: ImageNotFoundStub)
    def test_rbd_image_stat_filter_source_user(self, _):
        """Trash entries whose source is USER (not REMOVING) raise
        ImageNotFound."""
        self.rbd_inst_mock.trash_get.return_value = {
            'id': '3c1a5ee60a88',
            'name': 'test_rbd',
            'source': 'USER'
        }

        ioctx_mock = MagicMock()
        with self.assertRaises(ImageNotFoundStub) as ctx:
            # pylint: disable=protected-access
            RbdService._rbd_image_stat_removing(ioctx_mock, 'test_pool', '', '3c1a5ee60a88')
        self.assertIn('No image test_pool/3c1a5ee60a88 in status `REMOVING` found.',
                      str(ctx.exception))

    @mock.patch('dashboard.services.rbd.rbd.ImageNotFound', new_callable=lambda: ImageNotFoundStub)
    @mock.patch('dashboard.services.rbd.RbdService._pool_namespaces')
    @mock.patch('dashboard.services.rbd.RbdService._rbd_image_stat_removing')
    @mock.patch('dashboard.services.rbd.RbdService._rbd_image_stat')
    @mock.patch('dashboard.services.rbd.RbdService._rbd_image_refs')
    def test_rbd_pool_list(self, rbd_image_ref_mock, rbd_image_stat_mock,
                           rbd_image_stat_removing_mock, pool_namespaces, _):
        """rbd_pool_list falls back to the trash (removing) stat when the
        regular image stat raises ImageNotFound, and honors offset/limit."""
        time = datetime.utcnow()

        ioctx_mock = MagicMock()
        mgr.rados = MagicMock()
        mgr.rados.open_ioctx.return_value = ioctx_mock

        self.rbd_inst_mock.namespace_list.return_value = []
        rbd_image_ref_mock.return_value = [{'name': 'test_rbd', 'id': '3c1a5ee60a88'}]
        pool_namespaces.return_value = ['']

        # The regular stat fails, forcing the REMOVING fallback below.
        rbd_image_stat_mock.side_effect = mock.Mock(side_effect=ImageNotFoundStub(
            'RBD image not found test_pool/3c1a5ee60a88'))

        rbd_image_stat_removing_mock.return_value = {
            'id': '3c1a5ee60a88',
            'unique_id': 'test_pool/3c1a5ee60a88',
            'name': 'test_rbd',
            'source': 'REMOVING',
            'deletion_time': '{}Z'.format(time.isoformat()),
            'deferment_end_time': '{}Z'.format(time.isoformat()),
            'pool_name': 'test_pool',
            'namespace': ''
        }

        # With limit 0 no image stats are fetched: an empty image list is
        # returned, but the total count (1) is still reported.
        rbd_pool_list = RbdService.rbd_pool_list(['test_pool'], offset=0, limit=0)
        self.assertEqual(rbd_pool_list, ([], 1))

        self.rbd_inst_mock.namespace_list.return_value = []

        rbd_pool_list = RbdService.rbd_pool_list(['test_pool'], offset=0, limit=5)
        self.assertEqual(rbd_pool_list, ([{
            'id': '3c1a5ee60a88',
            'unique_id': 'test_pool/3c1a5ee60a88',
            'name': 'test_rbd',
            'source': 'REMOVING',
            'deletion_time': '{}Z'.format(time.isoformat()),
            'deferment_end_time': '{}Z'.format(time.isoformat()),
            'pool_name': 'test_pool',
            'namespace': ''
        }], 1))

    def test_valid_interval(self):
        """RBDSchedulerInterval accepts <number><m|h|d> and rejects anything
        else with ValueError."""
        test_cases = [
            ('15m', False),
            ('1h', False),
            ('5d', False),
            ('m', True),
            ('d', True),
            ('1s', True),
            ('11', True),
            ('1m1', True),
        ]
        for interval, error in test_cases:
            if error:
                with self.assertRaises(ValueError):
                    RBDSchedulerInterval(interval)
            else:
                self.assertEqual(str(RBDSchedulerInterval(interval)), interval)

    def test_rbd_image_refs_cache(self):
        """_rbd_image_refs returns the listing per pool even when called with
        distinct ioctx objects (exercising its per-pool cache keying)."""
        ioctx_mock = MagicMock()
        mgr.rados = MagicMock()
        mgr.rados.open_ioctx.return_value = ioctx_mock
        images = [{'image': str(i), 'id': str(i)} for i in range(10)]
        for i in range(5):
            self.rbd_inst_mock.list2.return_value = images[i*2:(i*2)+2]
            ioctx_mock = MagicMock()
            # pylint: disable=protected-access
            res = RbdService._rbd_image_refs(ioctx_mock, str(i))
            self.assertEqual(res, images[i*2:(i*2)+2])
diff --git a/src/pybind/mgr/dashboard/tests/test_rest_client.py b/src/pybind/mgr/dashboard/tests/test_rest_client.py
new file mode 100644
index 000000000..2df6763f9
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rest_client.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+import unittest
+
+import requests.exceptions
+
+try:
+ from mock import patch
+except ImportError:
+ from unittest.mock import patch
+
+from urllib3.exceptions import MaxRetryError, ProtocolError
+
+from .. import mgr
+from ..rest_client import RequestException, RestClient
+
+
class RestClientTestClass(RestClient):
    """RestClient subclass for testing purposes."""
    @RestClient.api_get('/')
    def fake_endpoint_method_with_annotation(self, request=None) -> bool:
        # Intentionally empty: the api_get decorator supplies the behavior;
        # the return annotation exists to test annotated endpoint methods.
        pass
+
+
class RestClientTest(unittest.TestCase):
    """Verify that RestClient injects the configured default timeout into
    session requests unless the caller supplies one."""

    def setUp(self):
        # REST_REQUESTS_TIMEOUT is the module option RestClient reads.
        settings = {'REST_REQUESTS_TIMEOUT': 45}
        mgr.get_module_option.side_effect = settings.get

    def test_timeout_auto_set(self):
        # No timeout given -> the configured 45s default is added.
        with patch('requests.Session.request') as mock_request:
            rest_client = RestClient('localhost', 8000)
            rest_client.session.request('GET', '/test')
            mock_request.assert_called_with('GET', '/test', timeout=45)

    def test_timeout_auto_set_arg(self):
        # Positional args (up to but not including timeout) still get the
        # default timeout appended as a keyword.
        with patch('requests.Session.request') as mock_request:
            rest_client = RestClient('localhost', 8000)
            rest_client.session.request(
                'GET', '/test', None, None, None, None,
                None, None, None)
            mock_request.assert_called_with(
                'GET', '/test', None, None, None, None,
                None, None, None, timeout=45)

    def test_timeout_no_auto_set_kwarg(self):
        # An explicit timeout keyword is left untouched.
        with patch('requests.Session.request') as mock_request:
            rest_client = RestClient('localhost', 8000)
            rest_client.session.request('GET', '/test', timeout=20)
            mock_request.assert_called_with('GET', '/test', timeout=20)

    def test_timeout_no_auto_set_arg(self):
        # A timeout passed positionally is also left untouched.
        with patch('requests.Session.request') as mock_request:
            rest_client = RestClient('localhost', 8000)
            rest_client.session.request(
                'GET', '/test', None, None, None, None,
                None, None, 40)
            mock_request.assert_called_with(
                'GET', '/test', None, None, None, None,
                None, None, 40)
+
+
class RestClientDoRequestTest(unittest.TestCase):
    """Verify do_request translates requests.ConnectionError variants into
    RequestException with a user-friendly message."""

    @classmethod
    def setUpClass(cls):
        # Patch the Session class for the whole test class; cls.mock_requests()
        # yields the shared mocked session instance used below.
        cls.mock_requests = patch('requests.Session').start()
        cls.rest_client = RestClientTestClass('localhost', 8000, 'UnitTest')

    def test_endpoint_method_with_annotation(self):
        self.assertEqual(self.rest_client.fake_endpoint_method_with_annotation(), None)

    def test_do_request_exception_no_args(self):
        # ConnectionError without arguments -> generic "cannot be reached".
        self.mock_requests().get.side_effect = requests.exceptions.ConnectionError()
        with self.assertRaises(RequestException) as context:
            self.rest_client.do_request('GET', '/test')
        self.assertEqual('UnitTest REST API cannot be reached. Please '
                         'check your configuration and that the API '
                         'endpoint is accessible',
                         context.exception.message)

    def test_do_request_exception_args_1(self):
        # MaxRetryError with a plain string reason -> still the generic text.
        self.mock_requests().post.side_effect = requests.exceptions.ConnectionError(
            MaxRetryError('Abc', 'http://xxx.yyy', 'too many redirects'))
        with self.assertRaises(RequestException) as context:
            self.rest_client.do_request('POST', '/test')
        self.assertEqual('UnitTest REST API cannot be reached. Please '
                         'check your configuration and that the API '
                         'endpoint is accessible',
                         context.exception.message)

    def test_do_request_exception_args_2(self):
        self.mock_requests().put.side_effect = requests.exceptions.ConnectionError(
            ProtocolError('Connection broken: xyz'))
        with self.assertRaises(RequestException) as context:
            self.rest_client.do_request('PUT', '/test')
        self.assertEqual('UnitTest REST API cannot be reached. Please '
                         'check your configuration and that the API '
                         'endpoint is accessible',
                         context.exception.message)

    def test_do_request_exception_nested_args(self):
        # A nested "[Errno ...]" reason is extracted and included in the
        # message (lower-cased errno tag).
        self.mock_requests().delete.side_effect = requests.exceptions.ConnectionError(
            MaxRetryError('Xyz', 'https://foo.bar',
                          Exception('Foo: [Errno -42] bla bla bla')))
        with self.assertRaises(RequestException) as context:
            self.rest_client.do_request('DELETE', '/test')
        self.assertEqual('UnitTest REST API cannot be reached: bla '
                         'bla bla [errno -42]. Please check your '
                         'configuration and that the API endpoint '
                         'is accessible',
                         context.exception.message)
diff --git a/src/pybind/mgr/dashboard/tests/test_rest_tasks.py b/src/pybind/mgr/dashboard/tests/test_rest_tasks.py
new file mode 100644
index 000000000..b32029851
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rest_tasks.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+
+import time
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from ..controllers import RESTController, Router, Task
+from ..controllers.task import Task as TaskController
+from ..services import progress
+from ..tests import ControllerTestCase
+from ..tools import NotificationQueue, TaskManager
+
+
@Router('/test/task', secure=False)
class TaskTest(RESTController):
    """Fake REST controller whose handlers run as dashboard tasks, covering
    the different Task metadata/argument-reference styles."""

    # Per-test artificial delay; a value above the 1.0s wait_for makes the
    # task finish asynchronously (executing-task code path).
    sleep_time = 0.0

    @Task('task/create', {'param': '{param}'}, wait_for=1.0)
    def create(self, param):
        time.sleep(TaskTest.sleep_time)
        return {'my_param': param}

    @Task('task/set', {'param': '{2}'}, wait_for=1.0)
    def set(self, key, param=None):
        # '{2}' references the handler's 2nd positional argument.
        time.sleep(TaskTest.sleep_time)
        return {'key': key, 'my_param': param}

    @Task('task/delete', ['{key}'], wait_for=1.0)
    def delete(self, key):
        # pylint: disable=unused-argument
        time.sleep(TaskTest.sleep_time)

    @Task('task/foo', ['{param}'])
    @RESTController.Collection('POST', path='/foo')
    def foo_post(self, param):
        return {'my_param': param}

    @Task('task/bar', ['{key}', '{param}'])
    @RESTController.Resource('PUT', path='/bar')
    def bar_put(self, key, param=None):
        return {'my_param': param, 'key': key}

    @Task('task/query', ['{param}'])
    @RESTController.Collection('POST', query_params=['param'])
    def query(self, param=None):
        return {'my_param': param}
+
+
class TaskControllerTest(ControllerTestCase):
    """End-to-end checks of the TaskTest endpoints through the task
    machinery (NotificationQueue + TaskManager)."""

    @classmethod
    def setup_server(cls):
        # pylint: disable=protected-access
        progress.get_progress_tasks = mock.MagicMock()
        progress.get_progress_tasks.return_value = ([], [])

        # Tasks require the running notification queue and task manager.
        NotificationQueue.start_queue()
        TaskManager.init()
        cls.setup_controllers([TaskTest, TaskController])

    @classmethod
    def tearDownClass(cls):
        NotificationQueue.stop()

    def setUp(self):
        # Default: tasks complete immediately (synchronous path).
        TaskTest.sleep_time = 0.0

    def test_create_task(self):
        self._task_post('/test/task', {'param': 'hello'})
        self.assertJsonBody({'my_param': 'hello'})

    def test_long_set_task(self):
        # Sleep longer than wait_for to exercise the asynchronous path;
        # _task_put waits for the task to finish.
        TaskTest.sleep_time = 2.0
        self._task_put('/test/task/2', {'param': 'hello'})
        self.assertJsonBody({'key': '2', 'my_param': 'hello'})

    def test_delete_task(self):
        self._task_delete('/test/task/hello')

    def test_foo_task(self):
        self._task_post('/test/task/foo', {'param': 'hello'})
        self.assertJsonBody({'my_param': 'hello'})

    def test_bar_task(self):
        self._task_put('/test/task/3/bar', {'param': 'hello'})
        self.assertJsonBody({'my_param': 'hello', 'key': '3'})

    def test_query_param(self):
        self._task_post('/test/task/query')
        self.assertJsonBody({'my_param': None})
diff --git a/src/pybind/mgr/dashboard/tests/test_rgw.py b/src/pybind/mgr/dashboard/tests/test_rgw.py
new file mode 100644
index 000000000..ce1b5fd92
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rgw.py
@@ -0,0 +1,225 @@
+from unittest.mock import Mock, call, patch
+
+from .. import mgr
+from ..controllers.rgw import Rgw, RgwDaemon, RgwUser
+from ..rest_client import RequestException
+from ..services.rgw_client import RgwClient
+from ..tests import ControllerTestCase, RgwStub
+
+
class RgwControllerTestCase(ControllerTestCase):
    """Tests for the /ui-api/rgw/status availability endpoint."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Rgw], '/test')

    def setUp(self) -> None:
        # Populate the stubbed mgr with RGW daemons and settings.
        RgwStub.get_daemons()
        RgwStub.get_settings()

    @patch.object(RgwClient, '_get_user_id', Mock(return_value='fake-user'))
    @patch.object(RgwClient, 'is_service_online', Mock(return_value=True))
    @patch.object(RgwClient, '_is_system_user', Mock(return_value=True))
    def test_status_available(self):
        self._get('/test/ui-api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': True, 'message': None})

    @patch.object(RgwClient, '_get_user_id', Mock(return_value='fake-user'))
    @patch.object(RgwClient, 'is_service_online', Mock(
        side_effect=RequestException('My test error')))
    def test_status_online_check_error(self):
        # A RequestException during the online check is reported verbatim.
        self._get('/test/ui-api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': False,
                             'message': 'My test error'})

    @patch.object(RgwClient, '_get_user_id', Mock(return_value='fake-user'))
    @patch.object(RgwClient, 'is_service_online', Mock(return_value=False))
    def test_status_not_online(self):
        self._get('/test/ui-api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': False,
                             'message': "Failed to connect to the Object Gateway's Admin Ops API."})

    @patch.object(RgwClient, '_get_user_id', Mock(return_value='fake-user'))
    @patch.object(RgwClient, 'is_service_online', Mock(return_value=True))
    @patch.object(RgwClient, '_is_system_user', Mock(return_value=False))
    def test_status_not_system_user(self):
        # The dashboard user must carry the RGW 'system' flag.
        self._get('/test/ui-api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': False,
                             'message': 'The system flag is not set for user "fake-user".'})

    def test_status_no_service(self):
        RgwStub.get_mgr_no_services()
        self._get('/test/ui-api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody({'available': False, 'message': 'No RGW service is running.'})
+
+
class RgwDaemonControllerTestCase(ControllerTestCase):
    """Tests for the /api/rgw/daemon listing endpoint."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([RgwDaemon], '/test')

    @patch('dashboard.services.rgw_client.RgwClient._get_user_id', Mock(
        return_value='dummy_admin'))
    def test_list(self):
        """Daemon metadata from the service map is merged with per-daemon
        mgr metadata; the first daemon is flagged as the default."""
        RgwStub.get_daemons()
        RgwStub.get_settings()
        mgr.list_servers.return_value = [{
            'hostname': 'host1',
            'services': [{'id': '4832', 'type': 'rgw'}, {'id': '5356', 'type': 'rgw'}]
        }]
        # get_metadata is consumed once per service, in order.
        mgr.get_metadata.side_effect = [
            {
                'ceph_version': 'ceph version master (dev)',
                'id': 'daemon1',
                'realm_name': 'realm1',
                'zonegroup_name': 'zg1',
                'zone_name': 'zone1'
            },
            {
                'ceph_version': 'ceph version master (dev)',
                'id': 'daemon2',
                'realm_name': 'realm2',
                'zonegroup_name': 'zg2',
                'zone_name': 'zone2'
            }]
        self._get('/test/api/rgw/daemon')
        self.assertStatus(200)
        self.assertJsonBody([{
            'id': 'daemon1',
            'service_map_id': '4832',
            'version': 'ceph version master (dev)',
            'server_hostname': 'host1',
            'realm_name': 'realm1',
            'zonegroup_name': 'zg1',
            'zone_name': 'zone1', 'default': True
        },
            {
            'id': 'daemon2',
            'service_map_id': '5356',
            'version': 'ceph version master (dev)',
            'server_hostname': 'host1',
            'realm_name': 'realm2',
            'zonegroup_name': 'zg2',
            'zone_name': 'zone2',
            'default': False
        }])

    def test_list_empty(self):
        RgwStub.get_mgr_no_services()
        self._get('/test/api/rgw/daemon')
        self.assertStatus(200)
        self.assertJsonBody([])
+
+
class RgwUserControllerTestCase(ControllerTestCase):
    """Tests for the /api/rgw/user endpoints, in particular the paginated
    (marker-based) user listing."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([RgwUser], '/test')

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list(self, mock_proxy):
        # Single non-truncated page: one proxy call, keys returned as-is.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user?daemon_name=dummy-daemon')
        self.assertStatus(200)
        mock_proxy.assert_has_calls([
            call('dummy-daemon', 'GET', 'user?list', {})
        ])
        self.assertJsonBody(['test1', 'test2', 'test3'])

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_marker(self, mock_proxy):
        # Truncated first page: the returned marker is fed into the next call
        # and the key lists are concatenated.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(200)
        mock_proxy.assert_has_calls([
            call(None, 'GET', 'user?list', {}),
            call(None, 'GET', 'user?list', {'marker': 'foo:bar'})
        ])
        self.assertJsonBody(['test1', 'test2', 'test3', 'admin'])

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_duplicate_marker(self, mock_proxy):
        # A repeated marker would loop forever; the controller bails out
        # with a 500 instead.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 3,
            'keys': ['test4', 'test5', 'test6'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(500)

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_invalid_marker(self, mock_proxy):
        # An empty marker on a truncated page is invalid -> 500.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 3,
            'keys': ['test4', 'test5', 'test6'],
            'marker': '',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(500)

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    @patch.object(RgwUser, '_keys_allowed')
    def test_user_get_with_keys(self, keys_allowed, mock_proxy):
        # Keys are included in the response when the caller is allowed.
        keys_allowed.return_value = True
        mock_proxy.return_value = {
            'tenant': '',
            'user_id': 'my_user_id',
            'keys': [],
            'swift_keys': []
        }
        self._get('/test/api/rgw/user/testuser')
        self.assertStatus(200)
        self.assertInJsonBody('keys')
        self.assertInJsonBody('swift_keys')

    @patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    @patch.object(RgwUser, '_keys_allowed')
    def test_user_get_without_keys(self, keys_allowed, mock_proxy):
        # Keys are stripped from the response when not allowed.
        keys_allowed.return_value = False
        mock_proxy.return_value = {
            'tenant': '',
            'user_id': 'my_user_id',
            'keys': [],
            'swift_keys': []
        }
        self._get('/test/api/rgw/user/testuser')
        self.assertStatus(200)
        self.assertNotIn('keys', self.json_body())
        self.assertNotIn('swift_keys', self.json_body())
diff --git a/src/pybind/mgr/dashboard/tests/test_rgw_client.py b/src/pybind/mgr/dashboard/tests/test_rgw_client.py
new file mode 100644
index 000000000..d23bdec2c
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rgw_client.py
@@ -0,0 +1,355 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=too-many-public-methods
+import errno
+from unittest import TestCase
+from unittest.mock import Mock, patch
+
+from .. import mgr
+from ..exceptions import DashboardException
+from ..services.rgw_client import NoCredentialsException, \
+ NoRgwDaemonsException, RgwClient, _parse_frontend_config
+from ..settings import Settings
+from ..tests import CLICommandTestMixin, RgwStub
+
+
@patch('dashboard.services.rgw_client.RgwClient._get_user_id', Mock(
    return_value='dummy_admin'))
class RgwClientTest(TestCase, CLICommandTestMixin):
    """Tests for RgwClient credential discovery and admin operations.

    ``RgwClient._get_user_id`` is patched for the whole class so no real
    RGW request is needed to resolve the admin user id.  The fake
    radosgw-admin results below mirror the (retcode, out, err) triple
    returned by ``mgr.send_rgwadmin_command`` -- NOTE(review): exact call
    order expected by each ``side_effect`` list follows the internal
    implementation of rgw_client; confirm against the service module
    before reordering.
    """

    # Fixed key pairs embedded in the canned radosgw-admin user results.
    _dashboard_user_realm1_access_key = 'VUOFXZFK24H81ISTVBTR'
    _dashboard_user_realm1_secret_key = '0PGsCvXPGWS3AGgibUZEcd9efLrbbshlUkY3jruR'
    _dashboard_user_realm2_access_key = 'OMDR282VYLBC1ZYMYDL0'
    _dashboard_user_realm2_secret_key = 'N3thf7jAiwQ90PsPrhC2DIcvCFOsBXtBvPJJMdC3'
    # Canned (retcode, out, err) results for mgr.send_rgwadmin_command.
    _radosgw_admin_result_error = (-errno.EINVAL, '', 'fake error')
    _radosgw_admin_result_no_realms = (0, {}, '')
    _radosgw_admin_result_realms = (0, {"realms": ["realm1", "realm2"]}, '')
    _radosgw_admin_result_user_realm1 = (
        0,
        {
            "keys": [
                {
                    "user": "dashboard",
                    "access_key": _dashboard_user_realm1_access_key,
                    "secret_key": _dashboard_user_realm1_secret_key
                }
            ],
            "system": "true"
        },
        '')
    _radosgw_admin_result_user_realm2 = (
        0,
        {
            "keys": [
                {
                    "user": "dashboard",
                    "access_key": _dashboard_user_realm2_access_key,
                    "secret_key": _dashboard_user_realm2_secret_key
                }
            ],
            "system": "true"
        },
        '')

    def setUp(self):
        # Stub RGW daemons and give the KV store valid default credentials.
        RgwStub.get_daemons()
        self.mock_kv_store()
        self.CONFIG_KEY_DICT.update({
            'RGW_API_ACCESS_KEY': 'klausmustermann',
            'RGW_API_SECRET_KEY': 'supergeheim',
        })

    def test_configure_credentials_error(self):
        """No stored keys and every radosgw-admin call failing raises
        NoCredentialsException."""
        self.CONFIG_KEY_DICT.update({
            'RGW_API_ACCESS_KEY': '',
            'RGW_API_SECRET_KEY': '',
        })
        # Get no realms, get no user, user creation fails.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
        ]
        with self.assertRaises(NoCredentialsException) as cm:
            RgwClient.admin_instance()
        self.assertIn('No RGW credentials found', str(cm.exception))

    def test_configure_credentials_error_with_realms(self):
        """Same as above, but with realms present: per-realm user lookup
        and creation both fail for every realm."""
        self.CONFIG_KEY_DICT.update({
            'RGW_API_ACCESS_KEY': '',
            'RGW_API_SECRET_KEY': '',
        })
        # Get realms, get no user, user creation fails.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_realms,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
        ]
        with self.assertRaises(NoCredentialsException) as cm:
            RgwClient.admin_instance()
        self.assertIn('No RGW credentials found', str(cm.exception))

    def test_set_rgw_credentials_command(self):
        """`dashboard set-rgw-credentials` stores flat keys when there are
        no realms and per-realm dicts when realms exist; realms whose user
        lookup/creation fails are simply omitted."""
        # Get no realms, get user.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_user_realm1
        ]
        result = self.exec_cmd('set-rgw-credentials')
        self.assertEqual(result, 'RGW credentials configured')
        self.assertEqual(Settings.RGW_API_ACCESS_KEY, self._dashboard_user_realm1_access_key)
        self.assertEqual(Settings.RGW_API_SECRET_KEY, self._dashboard_user_realm1_secret_key)

        # Get no realms, get no user, user creation.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_user_realm1
        ]
        result = self.exec_cmd('set-rgw-credentials')
        self.assertEqual(result, 'RGW credentials configured')
        self.assertEqual(Settings.RGW_API_ACCESS_KEY, self._dashboard_user_realm1_access_key)
        self.assertEqual(Settings.RGW_API_SECRET_KEY, self._dashboard_user_realm1_secret_key)

        # Get realms, get users.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_realms,
            self._radosgw_admin_result_user_realm1,
            self._radosgw_admin_result_user_realm2
        ]
        result = self.exec_cmd('set-rgw-credentials')
        self.assertEqual(result, 'RGW credentials configured')
        self.assertEqual(Settings.RGW_API_ACCESS_KEY, {
            'realm1': self._dashboard_user_realm1_access_key,
            'realm2': self._dashboard_user_realm2_access_key
        })
        self.assertEqual(Settings.RGW_API_SECRET_KEY, {
            'realm1': self._dashboard_user_realm1_secret_key,
            'realm2': self._dashboard_user_realm2_secret_key
        })

        # Get realms, get no users, users' creation.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_realms,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_user_realm1,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_user_realm2
        ]
        result = self.exec_cmd('set-rgw-credentials')
        self.assertEqual(result, 'RGW credentials configured')
        self.assertEqual(Settings.RGW_API_ACCESS_KEY, {
            'realm1': self._dashboard_user_realm1_access_key,
            'realm2': self._dashboard_user_realm2_access_key
        })
        self.assertEqual(Settings.RGW_API_SECRET_KEY, {
            'realm1': self._dashboard_user_realm1_secret_key,
            'realm2': self._dashboard_user_realm2_secret_key
        })

        # Get realms, get no users, realm 2 user creation fails.
        mgr.send_rgwadmin_command.side_effect = [
            self._radosgw_admin_result_realms,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_user_realm1,
            self._radosgw_admin_result_error,
            self._radosgw_admin_result_error,
        ]
        result = self.exec_cmd('set-rgw-credentials')
        self.assertEqual(result, 'RGW credentials configured')
        self.assertEqual(Settings.RGW_API_ACCESS_KEY, {
            'realm1': self._dashboard_user_realm1_access_key,
        })
        self.assertEqual(Settings.RGW_API_SECRET_KEY, {
            'realm1': self._dashboard_user_realm1_secret_key,
        })

    def test_ssl_verify(self):
        """RGW_API_SSL_VERIFY=True propagates to the requests session."""
        Settings.RGW_API_SSL_VERIFY = True
        instance = RgwClient.admin_instance()
        self.assertTrue(instance.session.verify)

    def test_no_ssl_verify(self):
        """RGW_API_SSL_VERIFY=False disables session verification."""
        Settings.RGW_API_SSL_VERIFY = False
        instance = RgwClient.admin_instance()
        self.assertFalse(instance.session.verify)

    def test_no_daemons(self):
        """Without any running RGW service, admin_instance() must raise."""
        RgwStub.get_mgr_no_services()
        with self.assertRaises(NoRgwDaemonsException) as cm:
            RgwClient.admin_instance()
        self.assertIn('No RGW service is running.', str(cm.exception))

    @patch.object(RgwClient, '_get_daemon_zone_info')
    def test_get_placement_targets_from_zone(self, zone_info):
        """Placement targets are derived from the zone's placement pools."""
        zone_info.return_value = {
            'id': 'a0df30ea-4b5b-4830-b143-2bedf684663d',
            'placement_pools': [
                {
                    'key': 'default-placement',
                    'val': {
                        'index_pool': 'default.rgw.buckets.index',
                        'storage_classes': {
                            'STANDARD': {
                                'data_pool': 'default.rgw.buckets.data'
                            }
                        }
                    }
                }
            ]
        }

        instance = RgwClient.admin_instance()
        expected_result = {
            'zonegroup': 'zonegroup1',
            'placement_targets': [
                {
                    'name': 'default-placement',
                    'data_pool': 'default.rgw.buckets.data'
                }
            ]
        }
        self.assertEqual(expected_result, instance.get_placement_targets())

    @patch.object(RgwClient, '_get_realms_info')
    def test_get_realms(self, realms_info):
        """get_realms() returns the realm list, or [] for empty info."""
        realms_info.side_effect = [
            {
                'default_info': '51de8373-bc24-4f74-a9b7-8e9ef4cb71f7',
                'realms': [
                    'realm1',
                    'realm2'
                ]
            },
            {}
        ]
        instance = RgwClient.admin_instance()

        self.assertEqual(['realm1', 'realm2'], instance.get_realms())
        self.assertEqual([], instance.get_realms())

    def test_set_bucket_locking_error(self):
        """Invalid mode / retention combinations raise DashboardException
        with a message matching the offending parameter."""
        instance = RgwClient.admin_instance()
        # (mode, retention_days, retention_years, expected error fragment)
        test_params = [
            ('COMPLIANCE', 'null', None, 'must be a positive integer'),
            ('COMPLIANCE', None, 'null', 'must be a positive integer'),
            ('COMPLIANCE', -1, None, 'must be a positive integer'),
            ('COMPLIANCE', None, -1, 'must be a positive integer'),
            ('COMPLIANCE', 1, 1, 'You can\'t specify both at the same time'),
            ('COMPLIANCE', None, None, 'You must specify at least one'),
            ('COMPLIANCE', 0, 0, 'You must specify at least one'),
            (None, 1, 0, 'must be either COMPLIANCE or GOVERNANCE'),
            ('', 1, 0, 'must be either COMPLIANCE or GOVERNANCE'),
            ('FAKE_MODE', 1, 0, 'must be either COMPLIANCE or GOVERNANCE')
        ]
        for params in test_params:
            mode, days, years = params
            with self.assertRaises(DashboardException) as cm:
                instance.set_bucket_locking(
                    bucket_name='test',
                    mode=mode,
                    retention_period_days=days,
                    retention_period_years=years
                )
            self.assertIn(error_msg, str(cm.exception))

    @patch('dashboard.rest_client._Request', Mock())
    def test_set_bucket_locking_success(self):
        """Valid mode spellings (any case) with exactly one retention
        period succeed; numeric strings are accepted too."""
        instance = RgwClient.admin_instance()
        # (mode, retention_days, retention_years)
        test_params = [
            ('Compliance', '1', None),
            ('Governance', 1, None),
            ('COMPLIANCE', None, '1'),
            ('GOVERNANCE', None, 1),
        ]
        for params in test_params:
            mode, days, years = params
            self.assertIsNone(instance.set_bucket_locking(
                bucket_name='test',
                mode=mode,
                retention_period_days=days,
                retention_period_years=years
            ))
+
+
class RgwClientHelperTest(TestCase):
    """Tests for ``_parse_frontend_config``.

    The helper extracts ``(port, ssl)`` from an rgw_frontends config
    string.  Cases 1-10 cover the beast frontend, 11-20 civetweb (where a
    trailing ``s`` on the port means SSL), 21-23 the error paths.
    """

    def test_parse_frontend_config_1(self):
        self.assertEqual(_parse_frontend_config('beast port=8000'), (8000, False))

    def test_parse_frontend_config_2(self):
        # With several ports, the first one wins.
        self.assertEqual(_parse_frontend_config('beast port=80 port=8000'), (80, False))

    def test_parse_frontend_config_3(self):
        # ssl_port takes precedence over a plain port.
        self.assertEqual(_parse_frontend_config('beast ssl_port=443 port=8000'), (443, True))

    def test_parse_frontend_config_4(self):
        self.assertEqual(_parse_frontend_config('beast endpoint=192.168.0.100:8000'), (8000, False))

    def test_parse_frontend_config_5(self):
        # Endpoint without an explicit port defaults to 80.
        self.assertEqual(_parse_frontend_config('beast endpoint=[::1]'), (80, False))

    def test_parse_frontend_config_6(self):
        self.assertEqual(_parse_frontend_config(
            'beast ssl_endpoint=192.168.0.100:8443'), (8443, True))

    def test_parse_frontend_config_7(self):
        # SSL endpoint without an explicit port defaults to 443.
        self.assertEqual(_parse_frontend_config('beast ssl_endpoint=192.168.0.100'), (443, True))

    def test_parse_frontend_config_8(self):
        self.assertEqual(_parse_frontend_config(
            'beast ssl_endpoint=[::1]:8443 endpoint=192.0.2.3:80'), (8443, True))

    def test_parse_frontend_config_9(self):
        self.assertEqual(_parse_frontend_config(
            'beast port=8080 endpoint=192.0.2.3:80'), (8080, False))

    def test_parse_frontend_config_10(self):
        self.assertEqual(_parse_frontend_config(
            'beast ssl_endpoint=192.0.2.3:8443 port=8080'), (8443, True))

    def test_parse_frontend_config_11(self):
        # civetweb: trailing 's' on the port number means SSL.
        self.assertEqual(_parse_frontend_config('civetweb port=8000s'), (8000, True))

    def test_parse_frontend_config_12(self):
        self.assertEqual(_parse_frontend_config('civetweb port=443s port=8000'), (443, True))

    def test_parse_frontend_config_13(self):
        self.assertEqual(_parse_frontend_config('civetweb port=192.0.2.3:80'), (80, False))

    def test_parse_frontend_config_14(self):
        self.assertEqual(_parse_frontend_config('civetweb port=172.5.2.51:8080s'), (8080, True))

    def test_parse_frontend_config_15(self):
        self.assertEqual(_parse_frontend_config('civetweb port=[::]:8080'), (8080, False))

    def test_parse_frontend_config_16(self):
        self.assertEqual(_parse_frontend_config('civetweb port=ip6-localhost:80s'), (80, True))

    def test_parse_frontend_config_17(self):
        self.assertEqual(_parse_frontend_config('civetweb port=[2001:0db8::1234]:80'), (80, False))

    def test_parse_frontend_config_18(self):
        self.assertEqual(_parse_frontend_config('civetweb port=[::1]:8443s'), (8443, True))

    def test_parse_frontend_config_19(self):
        # '+' joins multiple civetweb ports; only the first is reported.
        self.assertEqual(_parse_frontend_config('civetweb port=127.0.0.1:8443s+8000'), (8443, True))

    def test_parse_frontend_config_20(self):
        self.assertEqual(_parse_frontend_config('civetweb port=127.0.0.1:8080+443s'), (8080, False))

    def test_parse_frontend_config_21(self):
        # Non-numeric port is a LookupError.
        with self.assertRaises(LookupError) as ctx:
            _parse_frontend_config('civetweb port=xyz')
        self.assertEqual(str(ctx.exception),
                         'Failed to determine RGW port from "civetweb port=xyz"')

    def test_parse_frontend_config_22(self):
        # Missing port option is a LookupError.
        with self.assertRaises(LookupError) as ctx:
            _parse_frontend_config('civetweb')
        self.assertEqual(str(ctx.exception), 'Failed to determine RGW port from "civetweb"')

    def test_parse_frontend_config_23(self):
        # Unknown frontend name is a LookupError.
        with self.assertRaises(LookupError) as ctx:
            _parse_frontend_config('mongoose port=8080')
        self.assertEqual(str(ctx.exception),
                         'Failed to determine RGW port from "mongoose port=8080"')
diff --git a/src/pybind/mgr/dashboard/tests/test_settings.py b/src/pybind/mgr/dashboard/tests/test_settings.py
new file mode 100644
index 000000000..e204b566a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_settings.py
@@ -0,0 +1,208 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import errno
+import unittest
+
+from mgr_module import ERROR_MSG_EMPTY_INPUT_FILE
+
+from .. import settings
+from ..controllers.settings import Settings as SettingsController
+from ..settings import Settings, handle_option_command
+from ..tests import ControllerTestCase, KVStoreMockMixin
+
+
class SettingsTest(unittest.TestCase, KVStoreMockMixin):
    """Tests for the Settings accessor class and the
    ``dashboard get-/set-/reset-<option>`` command dispatch."""

    @classmethod
    def setUpClass(cls):
        # Install deterministic option definitions, then rebuild the
        # command map so the injected options get CLI handlers.
        setattr(settings.Options, 'GRAFANA_API_HOST', settings.Setting('localhost', [str]))
        setattr(settings.Options, 'GRAFANA_API_PORT', settings.Setting(3000, [int]))
        setattr(settings.Options, 'GRAFANA_ENABLED', settings.Setting(False, [bool]))
        # pylint: disable=protected-access
        settings._OPTIONS_COMMAND_MAP = settings._options_command_map()

    def setUp(self):
        # Fresh mocked KV store; restore defaults mutated by earlier tests.
        self.mock_kv_store()
        if Settings.GRAFANA_API_HOST != 'localhost':
            Settings.GRAFANA_API_HOST = 'localhost'
        if Settings.GRAFANA_API_PORT != 3000:
            Settings.GRAFANA_API_PORT = 3000

    def test_get_setting(self):
        """Unset options read back their declared defaults."""
        self.assertEqual(Settings.GRAFANA_API_HOST, 'localhost')
        self.assertEqual(Settings.GRAFANA_API_PORT, 3000)
        self.assertEqual(Settings.GRAFANA_ENABLED, False)

    def test_set_setting(self):
        """Attribute assignment persists for str, int and bool options."""
        Settings.GRAFANA_API_HOST = 'grafanahost'
        self.assertEqual(Settings.GRAFANA_API_HOST, 'grafanahost')

        Settings.GRAFANA_API_PORT = 50
        self.assertEqual(Settings.GRAFANA_API_PORT, 50)

        Settings.GRAFANA_ENABLED = True
        self.assertEqual(Settings.GRAFANA_ENABLED, True)

    def test_get_cmd(self):
        """`dashboard get-<option>` prints the current value on stdout."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-grafana-api-port'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, '3000')
        self.assertEqual(err, '')

    def test_set_cmd(self):
        """`dashboard set-<option>` accepts the value via command args."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-port',
             'value': '4000'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option GRAFANA_API_PORT updated')
        self.assertEqual(err, '')

    def test_set_secret_empty(self):
        """Secret options require an input buffer ('-i'); empty is EINVAL."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-password'},
            None
        )
        self.assertEqual(r, -errno.EINVAL)
        self.assertEqual(out, '')
        self.assertIn(ERROR_MSG_EMPTY_INPUT_FILE, err)

    def test_set_secret(self):
        """Secret options take their value from the inbuf argument."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-password'},
            'my-secret'
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option GRAFANA_API_PASSWORD updated')
        self.assertEqual(err, '')

    def test_reset_cmd(self):
        """`dashboard reset-<option>` restores the declared default."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard reset-grafana-enabled'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option {} reset to default value "{}"'.format(
            'GRAFANA_ENABLED', Settings.GRAFANA_ENABLED))
        self.assertEqual(err, '')

    def test_inv_cmd(self):
        """Unknown option commands return ENOSYS with an error message."""
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-non-existent-option'},
            None
        )
        self.assertEqual(r, -errno.ENOSYS)
        self.assertEqual(out, '')
        self.assertEqual(err, "Command not found "
                              "'dashboard get-non-existent-option'")

    def test_sync(self):
        """Attribute writes and CLI commands observe each other's changes."""
        Settings.GRAFANA_API_PORT = 5000
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-grafana-api-port'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, '5000')
        self.assertEqual(err, '')
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-host',
             'value': 'new-local-host'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option GRAFANA_API_HOST updated')
        self.assertEqual(err, '')
        self.assertEqual(Settings.GRAFANA_API_HOST, 'new-local-host')

    def test_attribute_error(self):
        """Reading an undeclared option raises a plain AttributeError."""
        with self.assertRaises(AttributeError) as ctx:
            _ = Settings.NON_EXISTENT_OPTION

        self.assertEqual(str(ctx.exception),
                         "type object 'Options' has no attribute 'NON_EXISTENT_OPTION'")
+
+
class SettingsControllerTest(ControllerTestCase, KVStoreMockMixin):
    """REST API tests for the /api/settings endpoints."""

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([SettingsController])

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Pin the option definitions used below so the tests do not depend
        # on the defaults shipped with the settings module.
        setattr(settings.Options, 'GRAFANA_API_HOST', settings.Setting('localhost', [str]))
        setattr(settings.Options, 'GRAFANA_ENABLED', settings.Setting(False, [bool]))

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()

    def setUp(self):
        super().setUp()
        self.mock_kv_store()

    def test_settings_list(self):
        """Every listed option exposes default, type, name and value."""
        self._get('/api/settings')
        data = self.json_body()
        self.assertStatus(200)
        self.assertTrue(len(data) > 0)
        self.assertIn('default', data[0].keys())
        self.assertIn('type', data[0].keys())
        self.assertIn('name', data[0].keys())
        self.assertIn('value', data[0].keys())

    def test_settings_list_filtered(self):
        """The `names` query parameter restricts the listing."""
        self._get('/api/settings?names=GRAFANA_ENABLED,PWD_POLICY_ENABLED')
        self.assertStatus(200)
        data = self.json_body()
        self.assertTrue(len(data) == 2)
        names = [option['name'] for option in data]
        self.assertIn('GRAFANA_ENABLED', names)
        self.assertIn('PWD_POLICY_ENABLED', names)

    # NOTE(review): the method name looks like a copy-paste from the RGW
    # tests; it actually checks fetching a single setting by kebab-case
    # name. Kept unchanged so the recorded test id stays stable.
    def test_rgw_daemon_get(self):
        self._get('/api/settings/grafana-api-username')
        self.assertStatus(200)
        self.assertJsonBody({
            u'default': u'admin',
            u'type': u'str',
            u'name': u'GRAFANA_API_USERNAME',
            u'value': u'admin',
        })

    def test_set(self):
        """PUT on a single option updates its value."""
        self._put('/api/settings/GRAFANA_API_USERNAME', {'value': 'foo'})
        self.assertStatus(200)

        self._get('/api/settings/GRAFANA_API_USERNAME')
        self.assertStatus(200)
        self.assertInJsonBody('default')
        self.assertInJsonBody('type')
        self.assertInJsonBody('name')
        self.assertInJsonBody('value')
        self.assertEqual(self.json_body()['value'], 'foo')

    def test_bulk_set(self):
        """PUT on the collection updates several options at once."""
        self._put('/api/settings', {
            'GRAFANA_API_USERNAME': 'foo',
            'GRAFANA_API_HOST': 'somehost',
        })
        self.assertStatus(200)

        # Fix: the original fetched grafana-api-username twice with
        # identical assertions (copy-paste duplication); one check per
        # option is sufficient.
        self._get('/api/settings/grafana-api-username')
        self.assertStatus(200)
        self.assertEqual(self.json_body()['value'], 'foo')

        self._get('/api/settings/grafana-api-host')
        self.assertStatus(200)
        self.assertEqual(self.json_body()['value'], 'somehost')
diff --git a/src/pybind/mgr/dashboard/tests/test_ssl.py b/src/pybind/mgr/dashboard/tests/test_ssl.py
new file mode 100644
index 000000000..840f2b8c9
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_ssl.py
@@ -0,0 +1,28 @@
+import errno
+import unittest
+
+from ..tests import CLICommandTestMixin, CmdException
+
+
class SslTest(unittest.TestCase, CLICommandTestMixin):
    """CLI tests for the dashboard SSL certificate commands."""

    def _assert_empty_inbuf_rejected(self, command, expected_message):
        # A missing/empty "-i" input file must fail with EINVAL and the
        # command-specific usage hint.
        with self.assertRaises(CmdException) as cm:
            self.exec_cmd(command, inbuf='', mgr_id='x')
        self.assertEqual(cm.exception.retcode, -errno.EINVAL)
        self.assertEqual(str(cm.exception), expected_message)

    def test_ssl_certificate_and_key(self):
        self._assert_empty_inbuf_rejected(
            'set-ssl-certificate',
            'Please specify the certificate with "-i" option')
        self.assertEqual(
            self.exec_cmd('set-ssl-certificate', inbuf='content', mgr_id='x'),
            'SSL certificate updated')

        self._assert_empty_inbuf_rejected(
            'set-ssl-certificate-key',
            'Please specify the certificate key with "-i" option')
        self.assertEqual(
            self.exec_cmd('set-ssl-certificate-key', inbuf='content', mgr_id='x'),
            'SSL certificate key updated')

    def test_set_mgr_created_self_signed_cert(self):
        self.assertEqual(self.exec_cmd('create-self-signed-cert'),
                         'Self-signed certificate created')
diff --git a/src/pybind/mgr/dashboard/tests/test_sso.py b/src/pybind/mgr/dashboard/tests/test_sso.py
new file mode 100644
index 000000000..5594738d1
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_sso.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import errno
+import unittest
+
+from ..services.sso import load_sso_db
+from ..tests import CLICommandTestMixin, CmdException
+
+
class AccessControlTest(unittest.TestCase, CLICommandTestMixin):
    """CLI tests for the SAML2 single sign-on (`ceph dashboard sso ...`)
    commands."""

    # Minimal SAML2 IdP metadata document; signature and certificate
    # values are placeholders. Indentation inside the literal is
    # cosmetic for XML -- TODO confirm against upstream if byte-exact
    # whitespace ever matters.
    IDP_METADATA = '''<?xml version="1.0"?>
<md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
                     xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
                     entityID="https://testidp.ceph.com/simplesamlphp/saml2/idp/metadata.php"
                     ID="pfx8ca6fbd7-6062-d4a9-7995-0730aeb8114f">
  <ds:Signature>
    <ds:SignedInfo>
      <ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
      <ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/>
      <ds:Reference URI="#pfx8ca6fbd7-6062-d4a9-7995-0730aeb8114f">
        <ds:Transforms>
          <ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
          <ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
        </ds:Transforms>
        <ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/>
        <ds:DigestValue>v6V8fooEUeq/LO/59JCfJF69Tw3ohN52OGAY6X3jX8w=</ds:DigestValue>
      </ds:Reference>
    </ds:SignedInfo>
    <ds:SignatureValue>IDP_SIGNATURE_VALUE</ds:SignatureValue>
    <ds:KeyInfo>
      <ds:X509Data>
        <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
      </ds:X509Data>
    </ds:KeyInfo>
  </ds:Signature>
  <md:IDPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
    <md:KeyDescriptor use="signing">
      <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
        <ds:X509Data>
          <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:KeyDescriptor use="encryption">
      <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
        <ds:X509Data>
          <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:SingleLogoutService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                            Location="https://testidp.ceph.com/simplesamlphp/saml2/idp/SingleLogoutService.php"/>
    <md:NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</md:NameIDFormat>
    <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                            Location="https://testidp.ceph.com/simplesamlphp/saml2/idp/SSOService.php"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>'''

    def setUp(self):
        # Fresh mocked KV store and a reloaded (empty) SSO database.
        self.mock_kv_store()
        load_sso_db()

    def validate_onelogin_settings(self, onelogin_settings, ceph_dashboard_base_url, uid,
                                   sp_x509cert, sp_private_key, signature_enabled):
        """Assert the structure of the python3-saml settings produced by
        `sso setup saml2`: SP endpoints derived from the dashboard base
        URL, the requested attribute, SP credentials and the five
        signature-related security flags."""
        self.assertIn('sp', onelogin_settings)
        self.assertIn('entityId', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['entityId'],
                         '{}/auth/saml2/metadata'.format(ceph_dashboard_base_url))

        self.assertIn('assertionConsumerService', onelogin_settings['sp'])
        self.assertIn('url', onelogin_settings['sp']['assertionConsumerService'])
        self.assertEqual(onelogin_settings['sp']['assertionConsumerService']['url'],
                         '{}/auth/saml2'.format(ceph_dashboard_base_url))

        self.assertIn('attributeConsumingService', onelogin_settings['sp'])
        attribute_consuming_service = onelogin_settings['sp']['attributeConsumingService']
        self.assertIn('requestedAttributes', attribute_consuming_service)
        requested_attributes = attribute_consuming_service['requestedAttributes']
        self.assertEqual(len(requested_attributes), 1)
        self.assertIn('name', requested_attributes[0])
        self.assertEqual(requested_attributes[0]['name'], uid)

        self.assertIn('singleLogoutService', onelogin_settings['sp'])
        self.assertIn('url', onelogin_settings['sp']['singleLogoutService'])
        self.assertEqual(onelogin_settings['sp']['singleLogoutService']['url'],
                         '{}/auth/saml2/logout'.format(ceph_dashboard_base_url))

        self.assertIn('x509cert', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['x509cert'], sp_x509cert)

        self.assertIn('privateKey', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['privateKey'], sp_private_key)

        self.assertIn('security', onelogin_settings)
        self.assertIn('authnRequestsSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['authnRequestsSigned'], signature_enabled)

        self.assertIn('logoutRequestSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['logoutRequestSigned'], signature_enabled)

        self.assertIn('logoutResponseSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['logoutResponseSigned'], signature_enabled)

        self.assertIn('wantMessagesSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['wantMessagesSigned'], signature_enabled)

        self.assertIn('wantAssertionsSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['wantAssertionsSigned'], signature_enabled)

    def test_sso_saml2_setup(self):
        """Setup without SP cert/key yields unsigned settings."""
        result = self.exec_cmd('sso setup saml2',
                               ceph_dashboard_base_url='https://cephdashboard.local',
                               idp_metadata=self.IDP_METADATA)
        self.validate_onelogin_settings(result, 'https://cephdashboard.local', 'uid', '', '',
                                        False)

    def test_sso_enable_saml2(self):
        """Enabling SSO before setup fails with EPERM; succeeds after."""
        with self.assertRaises(CmdException) as ctx:
            self.exec_cmd('sso enable saml2')

        self.assertEqual(ctx.exception.retcode, -errno.EPERM)
        self.assertEqual(str(ctx.exception), 'Single Sign-On is not configured: '
                                             'use `ceph dashboard sso setup saml2`')

        self.exec_cmd('sso setup saml2',
                      ceph_dashboard_base_url='https://cephdashboard.local',
                      idp_metadata=self.IDP_METADATA)

        result = self.exec_cmd('sso enable saml2')
        self.assertEqual(result, 'SSO is "enabled" with "SAML2" protocol.')

    def test_sso_disable(self):
        result = self.exec_cmd('sso disable')
        self.assertEqual(result, 'SSO is "disabled".')

    def test_sso_status(self):
        """`sso setup saml2` implicitly enables SSO, as status reflects."""
        result = self.exec_cmd('sso status')
        self.assertEqual(result, 'SSO is "disabled".')

        self.exec_cmd('sso setup saml2',
                      ceph_dashboard_base_url='https://cephdashboard.local',
                      idp_metadata=self.IDP_METADATA)

        result = self.exec_cmd('sso status')
        self.assertEqual(result, 'SSO is "enabled" with "SAML2" protocol.')

    def test_sso_show_saml2(self):
        """Before setup, the stored onelogin settings are empty."""
        result = self.exec_cmd('sso show saml2')
        self.assertEqual(result, {
            'onelogin_settings': {}
        })
diff --git a/src/pybind/mgr/dashboard/tests/test_task.py b/src/pybind/mgr/dashboard/tests/test_task.py
new file mode 100644
index 000000000..0d51e2d3f
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_task.py
@@ -0,0 +1,433 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import json
+import threading
+import time
+import unittest
+from collections import defaultdict
+from functools import partial
+
+from ..services.exception import serialize_dashboard_exception
+from ..tools import NotificationQueue, TaskExecutor, TaskManager
+
+
+class MyTask(object):
+ class CallbackExecutor(TaskExecutor):
+ def __init__(self, fail, progress):
+ super(MyTask.CallbackExecutor, self).__init__()
+ self.fail = fail
+ self.progress = progress
+
+ def init(self, task):
+ super(MyTask.CallbackExecutor, self).init(task)
+ args = [self.callback]
+ args.extend(self.task.fn_args)
+ self.task.fn_args = args
+
+ def callback(self, result):
+ self.task.set_progress(self.progress)
+ if self.fail:
+ self.finish(None, Exception("Task Unexpected Exception"))
+ else:
+ self.finish(result, None)
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, op_seconds, wait=False, fail=False, progress=50,
+ is_async=False, handle_ex=False):
+ self.op_seconds = op_seconds
+ self.wait = wait
+ self.fail = fail
+ self.progress = progress
+ self.is_async = is_async
+ self.handle_ex = handle_ex
+ self._event = threading.Event()
+
+ def run(self, ns, timeout=None):
+ args = ['dummy arg']
+ kwargs = {'dummy': 'arg'}
+ h_ex = partial(serialize_dashboard_exception,
+ include_http_status=True) if self.handle_ex else None
+ if not self.is_async:
+ task = TaskManager.run(
+ ns, self.metadata(), self.task_op, args, kwargs,
+ exception_handler=h_ex)
+ else:
+ task = TaskManager.run(
+ ns, self.metadata(), self.task_async_op, args, kwargs,
+ executor=MyTask.CallbackExecutor(self.fail, self.progress),
+ exception_handler=h_ex)
+ return task.wait(timeout)
+
+ def task_op(self, *args, **kwargs):
+ time.sleep(self.op_seconds)
+ TaskManager.current_task().set_progress(self.progress)
+ if self.fail:
+ raise Exception("Task Unexpected Exception")
+ if self.wait:
+ self._event.wait()
+ return {'args': list(args), 'kwargs': kwargs}
+
+ def task_async_op(self, callback, *args, **kwargs):
+ if self.fail == "premature":
+ raise Exception("Task Unexpected Exception")
+
+ def _run_bg():
+ time.sleep(self.op_seconds)
+ if self.wait:
+ self._event.wait()
+ callback({'args': list(args), 'kwargs': kwargs})
+
+ worker = threading.Thread(target=_run_bg)
+ worker.start()
+
+ def resume(self):
+ self._event.set()
+
+ def metadata(self):
+ return {
+ 'op_seconds': self.op_seconds,
+ 'wait': self.wait,
+ 'fail': self.fail,
+ 'progress': self.progress,
+ 'is_async': self.is_async,
+ 'handle_ex': self.handle_ex
+ }
+
+
+class TaskTest(unittest.TestCase):
+
+ TASK_FINISHED_MAP = defaultdict(threading.Event)
+
+ @classmethod
+ def _handle_task(cls, task):
+ cls.TASK_FINISHED_MAP[task.name].set()
+
+ @classmethod
+ def wait_for_task(cls, name):
+ cls.TASK_FINISHED_MAP[name].wait()
+
+ @classmethod
+ def setUpClass(cls):
+ NotificationQueue.start_queue()
+ TaskManager.init()
+ NotificationQueue.register(cls._handle_task, 'cd_task_finished',
+ priority=100)
+
+ @classmethod
+ def tearDownClass(cls):
+ NotificationQueue.deregister(cls._handle_task, 'cd_task_finished')
+ NotificationQueue.stop()
+
+ def setUp(self):
+ TaskManager.FINISHED_TASK_SIZE = 10
+ TaskManager.FINISHED_TASK_TTL = 60.0
+
+ def assertTaskResult(self, result): # noqa: N802
+ self.assertEqual(result,
+ {'args': ['dummy arg'], 'kwargs': {'dummy': 'arg'}})
+
+ def test_fast_task(self):
+ task1 = MyTask(1)
+ state, result = task1.run('test1/task1')
+ self.assertEqual(state, TaskManager.VALUE_DONE)
+ self.assertTaskResult(result)
+ self.wait_for_task('test1/task1')
+ _, fn_t = TaskManager.list('test1/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].exception)
+ self.assertTaskResult(fn_t[0].ret_value)
+ self.assertEqual(fn_t[0].progress, 100)
+
+ def test_slow_task(self):
+ task1 = MyTask(1)
+ state, result = task1.run('test2/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ self.wait_for_task('test2/task1')
+ _, fn_t = TaskManager.list('test2/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].exception)
+ self.assertTaskResult(fn_t[0].ret_value)
+ self.assertEqual(fn_t[0].progress, 100)
+
+ def test_fast_task_with_failure(self):
+ task1 = MyTask(1, fail=True, progress=40)
+
+ with self.assertRaises(Exception) as ctx:
+ task1.run('test3/task1')
+
+ self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
+ self.wait_for_task('test3/task1')
+ _, fn_t = TaskManager.list('test3/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].ret_value)
+ self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
+ self.assertEqual(fn_t[0].progress, 40)
+
+ def test_slow_task_with_failure(self):
+ task1 = MyTask(1, fail=True, progress=70)
+ state, result = task1.run('test4/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ self.wait_for_task('test4/task1')
+ _, fn_t = TaskManager.list('test4/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].ret_value)
+ self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
+ self.assertEqual(fn_t[0].progress, 70)
+
+ def test_executing_tasks_list(self):
+ task1 = MyTask(0, wait=True, progress=30)
+ task2 = MyTask(0, wait=True, progress=60)
+ state, result = task1.run('test5/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ ex_t, _ = TaskManager.list('test5/*')
+ self.assertEqual(len(ex_t), 1)
+ self.assertEqual(ex_t[0].name, 'test5/task1')
+ self.assertEqual(ex_t[0].progress, 30)
+ state, result = task2.run('test5/task2', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ ex_t, _ = TaskManager.list('test5/*')
+ self.assertEqual(len(ex_t), 2)
+ for task in ex_t:
+ if task.name == 'test5/task1':
+ self.assertEqual(task.progress, 30)
+ elif task.name == 'test5/task2':
+ self.assertEqual(task.progress, 60)
+ task2.resume()
+ self.wait_for_task('test5/task2')
+ ex_t, _ = TaskManager.list('test5/*')
+ self.assertEqual(len(ex_t), 1)
+ self.assertEqual(ex_t[0].name, 'test5/task1')
+ task1.resume()
+ self.wait_for_task('test5/task1')
+ ex_t, _ = TaskManager.list('test5/*')
+ self.assertEqual(len(ex_t), 0)
+
+ def test_task_idempotent(self):
+ task1 = MyTask(0, wait=True)
+ task1_clone = MyTask(0, wait=True)
+ state, result = task1.run('test6/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ ex_t, _ = TaskManager.list('test6/*')
+ self.assertEqual(len(ex_t), 1)
+ self.assertEqual(ex_t[0].name, 'test6/task1')
+ state, result = task1_clone.run('test6/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ ex_t, _ = TaskManager.list('test6/*')
+ self.assertEqual(len(ex_t), 1)
+ self.assertEqual(ex_t[0].name, 'test6/task1')
+ task1.resume()
+ self.wait_for_task('test6/task1')
+ ex_t, fn_t = TaskManager.list('test6/*')
+ self.assertEqual(len(ex_t), 0)
+ self.assertEqual(len(fn_t), 1)
+
+ def test_finished_cleanup(self):
+ TaskManager.FINISHED_TASK_SIZE = 2
+ TaskManager.FINISHED_TASK_TTL = 0.5
+ task1 = MyTask(0)
+ task2 = MyTask(0)
+ state, result = task1.run('test7/task1')
+ self.assertEqual(state, TaskManager.VALUE_DONE)
+ self.assertTaskResult(result)
+ self.wait_for_task('test7/task1')
+ state, result = task2.run('test7/task2')
+ self.assertEqual(state, TaskManager.VALUE_DONE)
+ self.assertTaskResult(result)
+ self.wait_for_task('test7/task2')
+ time.sleep(1)
+ _, fn_t = TaskManager.list('test7/*')
+ self.assertEqual(len(fn_t), 2)
+ for idx, task in enumerate(fn_t):
+ self.assertEqual(task.name,
+ "test7/task{}".format(len(fn_t)-idx))
+ task3 = MyTask(0)
+ state, result = task3.run('test7/task3')
+ self.assertEqual(state, TaskManager.VALUE_DONE)
+ self.assertTaskResult(result)
+ self.wait_for_task('test7/task3')
+ time.sleep(1)
+ _, fn_t = TaskManager.list('test7/*')
+ self.assertEqual(len(fn_t), 3)
+ for idx, task in enumerate(fn_t):
+ self.assertEqual(task.name,
+ "test7/task{}".format(len(fn_t)-idx))
+ _, fn_t = TaskManager.list('test7/*')
+ self.assertEqual(len(fn_t), 2)
+ for idx, task in enumerate(fn_t):
+ self.assertEqual(task.name,
+ "test7/task{}".format(len(fn_t)-idx+1))
+
+ def test_task_serialization_format(self):
+ task1 = MyTask(0, wait=True, progress=20)
+ task2 = MyTask(1)
+ task1.run('test8/task1', 0.5)
+ task2.run('test8/task2', 0.5)
+ self.wait_for_task('test8/task2')
+ ex_t, fn_t = TaskManager.list_serializable('test8/*')
+ self.assertEqual(len(ex_t), 1)
+ self.assertEqual(len(fn_t), 1)
+
+ try:
+ json.dumps(ex_t)
+ except ValueError as ex:
+ self.fail("Failed to serialize executing tasks: {}".format(str(ex)))
+
+ try:
+ json.dumps(fn_t)
+ except ValueError as ex:
+ self.fail("Failed to serialize finished tasks: {}".format(str(ex)))
+
+ # validate executing tasks attributes
+ self.assertEqual(len(ex_t[0].keys()), 4)
+ self.assertEqual(ex_t[0]['name'], 'test8/task1')
+ self.assertEqual(ex_t[0]['metadata'], task1.metadata())
+ self.assertIsNotNone(ex_t[0]['begin_time'])
+ self.assertEqual(ex_t[0]['progress'], 20)
+ # validate finished tasks attributes
+ self.assertEqual(len(fn_t[0].keys()), 9)
+ self.assertEqual(fn_t[0]['name'], 'test8/task2')
+ self.assertEqual(fn_t[0]['metadata'], task2.metadata())
+ self.assertIsNotNone(fn_t[0]['begin_time'])
+ self.assertIsNotNone(fn_t[0]['end_time'])
+ self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
+ self.assertEqual(fn_t[0]['progress'], 100)
+ self.assertTrue(fn_t[0]['success'])
+ self.assertTaskResult(fn_t[0]['ret_value'])
+ self.assertIsNone(fn_t[0]['exception'])
+ task1.resume()
+ self.wait_for_task('test8/task1')
+
+ def test_fast_async_task(self):
+ task1 = MyTask(1, is_async=True)
+ state, result = task1.run('test9/task1')
+ self.assertEqual(state, TaskManager.VALUE_DONE)
+ self.assertTaskResult(result)
+ self.wait_for_task('test9/task1')
+ _, fn_t = TaskManager.list('test9/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].exception)
+ self.assertTaskResult(fn_t[0].ret_value)
+ self.assertEqual(fn_t[0].progress, 100)
+
+ def test_slow_async_task(self):
+ task1 = MyTask(1, is_async=True)
+ state, result = task1.run('test10/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ self.wait_for_task('test10/task1')
+ _, fn_t = TaskManager.list('test10/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].exception)
+ self.assertTaskResult(fn_t[0].ret_value)
+ self.assertEqual(fn_t[0].progress, 100)
+
+ def test_fast_async_task_with_failure(self):
+ task1 = MyTask(1, fail=True, progress=40, is_async=True)
+
+ with self.assertRaises(Exception) as ctx:
+ task1.run('test11/task1')
+
+ self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
+ self.wait_for_task('test11/task1')
+ _, fn_t = TaskManager.list('test11/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].ret_value)
+ self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
+ self.assertEqual(fn_t[0].progress, 40)
+
+ def test_slow_async_task_with_failure(self):
+ task1 = MyTask(1, fail=True, progress=70, is_async=True)
+ state, result = task1.run('test12/task1', 0.5)
+ self.assertEqual(state, TaskManager.VALUE_EXECUTING)
+ self.assertIsNone(result)
+ self.wait_for_task('test12/task1')
+ _, fn_t = TaskManager.list('test12/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].ret_value)
+ self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
+ self.assertEqual(fn_t[0].progress, 70)
+
+ def test_fast_async_task_with_premature_failure(self):
+ task1 = MyTask(1, fail="premature", progress=40, is_async=True)
+
+ with self.assertRaises(Exception) as ctx:
+ task1.run('test13/task1')
+
+ self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
+ self.wait_for_task('test13/task1')
+ _, fn_t = TaskManager.list('test13/*')
+ self.assertEqual(len(fn_t), 1)
+ self.assertIsNone(fn_t[0].ret_value)
+ self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
+
+ def test_task_serialization_format_on_failure(self):
+ task1 = MyTask(1, fail=True)
+ task1.run('test14/task1', 0.5)
+ self.wait_for_task('test14/task1')
+ ex_t, fn_t = TaskManager.list_serializable('test14/*')
+ self.assertEqual(len(ex_t), 0)
+ self.assertEqual(len(fn_t), 1)
+ # validate finished tasks attributes
+
+ try:
+ json.dumps(fn_t)
+ except TypeError as ex:
+ self.fail("Failed to serialize finished tasks: {}".format(str(ex)))
+
+ self.assertEqual(len(fn_t[0].keys()), 9)
+ self.assertEqual(fn_t[0]['name'], 'test14/task1')
+ self.assertEqual(fn_t[0]['metadata'], task1.metadata())
+ self.assertIsNotNone(fn_t[0]['begin_time'])
+ self.assertIsNotNone(fn_t[0]['end_time'])
+ self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
+ self.assertEqual(fn_t[0]['progress'], 50)
+ self.assertFalse(fn_t[0]['success'])
+ self.assertIsNotNone(fn_t[0]['exception'])
+ self.assertEqual(fn_t[0]['exception'],
+ {"detail": "Task Unexpected Exception"})
+
+ def test_task_serialization_format_on_failure_with_handler(self):
+ task1 = MyTask(1, fail=True, handle_ex=True)
+ task1.run('test15/task1', 0.5)
+ self.wait_for_task('test15/task1')
+ ex_t, fn_t = TaskManager.list_serializable('test15/*')
+ self.assertEqual(len(ex_t), 0)
+ self.assertEqual(len(fn_t), 1)
+ # validate finished tasks attributes
+
+ try:
+ json.dumps(fn_t)
+ except TypeError as ex:
+ self.fail("Failed to serialize finished tasks: {}".format(str(ex)))
+
+ self.assertEqual(len(fn_t[0].keys()), 9)
+ self.assertEqual(fn_t[0]['name'], 'test15/task1')
+ self.assertEqual(fn_t[0]['metadata'], task1.metadata())
+ self.assertIsNotNone(fn_t[0]['begin_time'])
+ self.assertIsNotNone(fn_t[0]['end_time'])
+ self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
+ self.assertEqual(fn_t[0]['progress'], 50)
+ self.assertFalse(fn_t[0]['success'])
+ self.assertIsNotNone(fn_t[0]['exception'])
+ self.assertEqual(fn_t[0]['exception'], {
+ 'component': None,
+ 'detail': 'Task Unexpected Exception',
+ 'status': 500,
+ 'task': {
+ 'metadata': {
+ 'fail': True,
+ 'handle_ex': True,
+ 'is_async': False,
+ 'op_seconds': 1,
+ 'progress': 50,
+ 'wait': False},
+ 'name': 'test15/task1'
+ }
+ })
diff --git a/src/pybind/mgr/dashboard/tests/test_tools.py b/src/pybind/mgr/dashboard/tests/test_tools.py
new file mode 100644
index 000000000..eaae3e295
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_tools.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+
+import cherrypy
+from cherrypy.lib.sessions import RamSession
+
+try:
+ from mock import patch
+except ImportError:
+ from unittest.mock import patch
+
+from ..controllers import APIRouter, BaseController, Proxy, RESTController, Router
+from ..controllers._version import APIVersion
+from ..services.exception import handle_rados_error
+from ..tests import ControllerTestCase
+from ..tools import dict_contains_path, dict_get, json_str_to_object, \
+ merge_list_of_dicts_by_key, partial_dict
+
+
+# pylint: disable=W0613
+@Router('/foo', secure=False)
+class FooResource(RESTController):
+ elems = []
+
+ def list(self):
+ return FooResource.elems
+
+ def create(self, a):
+ FooResource.elems.append({'a': a})
+ return {'a': a}
+
+ def get(self, key):
+ return {'detail': (key, [])}
+
+ def delete(self, key):
+ del FooResource.elems[int(key)]
+
+ def bulk_delete(self):
+ FooResource.elems = []
+
+ def set(self, key, newdata):
+ FooResource.elems[int(key)] = {'newdata': newdata}
+ return dict(key=key, newdata=newdata)
+
+
+@Router('/foo/:key/:method', secure=False)
+class FooResourceDetail(RESTController):
+ def list(self, key, method):
+ return {'detail': (key, [method])}
+
+
+@APIRouter('/rgw/proxy', secure=False)
+class GenerateControllerRoutesController(BaseController):
+ @Proxy()
+ def __call__(self, path, **params):
+ pass
+
+
+@APIRouter('/fooargs', secure=False)
+class FooArgs(RESTController):
+ def set(self, code, name=None, opt1=None, opt2=None):
+ return {'code': code, 'name': name, 'opt1': opt1, 'opt2': opt2}
+
+ @handle_rados_error('foo')
+ def create(self, my_arg_name):
+ return my_arg_name
+
+ def list(self):
+ raise cherrypy.NotFound()
+
+
+class Root(object):
+ foo_resource = FooResource()
+ fooargs = FooArgs()
+
+
+class RESTControllerTest(ControllerTestCase):
+
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers(
+ [FooResource, FooResourceDetail, FooArgs, GenerateControllerRoutesController])
+
+ def test_empty(self):
+ self._delete("/foo")
+ self.assertStatus(204)
+ self._get("/foo")
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', APIVersion.DEFAULT.to_mime_type())
+ self.assertBody('[]')
+
+ def test_fill(self):
+ sess_mock = RamSession()
+ with patch('cherrypy.session', sess_mock, create=True):
+ data = {'a': 'b'}
+ for _ in range(5):
+ self._post("/foo", data)
+ self.assertJsonBody(data)
+ self.assertStatus(201)
+ self.assertHeader('Content-Type', APIVersion.DEFAULT.to_mime_type())
+
+ self._get("/foo")
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', APIVersion.DEFAULT.to_mime_type())
+ self.assertJsonBody([data] * 5)
+
+ self._put('/foo/0', {'newdata': 'newdata'})
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', APIVersion.DEFAULT.to_mime_type())
+ self.assertJsonBody({'newdata': 'newdata', 'key': '0'})
+
+ def test_not_implemented(self):
+ self._put("/foo")
+ self.assertStatus(404)
+ body = self.json_body()
+ self.assertIsInstance(body, dict)
+ assert body['detail'] == "The path '/foo' was not found."
+ assert '404' in body['status']
+
+ def test_args_from_json(self):
+ self._put("/api/fooargs/hello", {'name': 'world'})
+ self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': None, 'opt2': None})
+
+ self._put("/api/fooargs/hello", {'name': 'world', 'opt1': 'opt1'})
+ self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': 'opt1', 'opt2': None})
+
+ self._put("/api/fooargs/hello", {'name': 'world', 'opt2': 'opt2'})
+ self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': None, 'opt2': 'opt2'})
+
+ def test_detail_route(self):
+ self._get('/foo/default')
+ self.assertJsonBody({'detail': ['default', []]})
+
+ self._get('/foo/default/default')
+ self.assertJsonBody({'detail': ['default', ['default']]})
+
+ self._get('/foo/1/detail')
+ self.assertJsonBody({'detail': ['1', ['detail']]})
+
+ self._post('/foo/1/detail', 'post-data')
+ self.assertStatus(404)
+
+ def test_generate_controller_routes(self):
+ # We just need to add this controller in setup_server():
+ # noinspection PyStatementEffect
+ # pylint: disable=pointless-statement
+ GenerateControllerRoutesController
+
+
+class RequestLoggingToolTest(ControllerTestCase):
+
+ _request_logging = True
+
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([FooResource])
+
+ def test_is_logged(self):
+ with patch('logging.Logger.debug') as mock_logger_debug:
+ self._put('/foo/0', {'newdata': 'xyz'})
+ self.assertStatus(200)
+ call_args_list = mock_logger_debug.call_args_list
+ _, host, _, method, user, path = call_args_list[0][0]
+ self.assertEqual(host, '127.0.0.1')
+ self.assertEqual(method, 'PUT')
+ self.assertIsNone(user)
+ self.assertEqual(path, '/foo/0')
+
+
+class TestFunctions(unittest.TestCase):
+
+ def test_dict_contains_path(self):
+ x = {'a': {'b': {'c': 'foo'}}}
+ self.assertTrue(dict_contains_path(x, ['a', 'b', 'c']))
+ self.assertTrue(dict_contains_path(x, ['a', 'b', 'c']))
+ self.assertTrue(dict_contains_path(x, ['a']))
+ self.assertFalse(dict_contains_path(x, ['a', 'c']))
+ self.assertTrue(dict_contains_path(x, []))
+
+ def test_json_str_to_object(self):
+ expected_result = {'a': 1, 'b': 'bbb'}
+ self.assertEqual(expected_result, json_str_to_object('{"a": 1, "b": "bbb"}'))
+ self.assertEqual(expected_result, json_str_to_object(b'{"a": 1, "b": "bbb"}'))
+ self.assertEqual('', json_str_to_object(''))
+ self.assertRaises(TypeError, json_str_to_object, None)
+
+ def test_partial_dict(self):
+ expected_result = {'a': 1, 'c': 3}
+ self.assertEqual(expected_result, partial_dict({'a': 1, 'b': 2, 'c': 3}, ['a', 'c']))
+ self.assertEqual({}, partial_dict({'a': 1, 'b': 2, 'c': 3}, []))
+ self.assertEqual({}, partial_dict({}, []))
+ self.assertRaises(KeyError, partial_dict, {'a': 1, 'b': 2, 'c': 3}, ['d'])
+ self.assertRaises(TypeError, partial_dict, None, ['a'])
+ self.assertRaises(TypeError, partial_dict, {'a': 1, 'b': 2, 'c': 3}, None)
+
+ def test_dict_get(self):
+ self.assertFalse(dict_get({'foo': {'bar': False}}, 'foo.bar'))
+ self.assertIsNone(dict_get({'foo': {'bar': False}}, 'foo.bar.baz'))
+ self.assertEqual(dict_get({'foo': {'bar': False}, 'baz': 'xyz'}, 'baz'), 'xyz')
+
+ def test_merge_list_of_dicts_by_key(self):
+ expected_result = [{'a': 1, 'b': 2, 'c': 3}, {'a': 4, 'b': 5, 'c': 6}]
+ self.assertEqual(expected_result, merge_list_of_dicts_by_key(
+ [{'a': 1, 'b': 2}, {'a': 4, 'b': 5}], [{'a': 1, 'c': 3}, {'a': 4, 'c': 6}], 'a'))
+
+ expected_result = [{'a': 1, 'b': 2}, {'a': 4, 'b': 5, 'c': 6}]
+ self.assertEqual(expected_result, merge_list_of_dicts_by_key(
+ [{'a': 1, 'b': 2}, {'a': 4, 'b': 5}], [{}, {'a': 4, 'c': 6}], 'a'))
+ self.assertRaises(TypeError, merge_list_of_dicts_by_key, None)
diff --git a/src/pybind/mgr/dashboard/tests/test_versioning.py b/src/pybind/mgr/dashboard/tests/test_versioning.py
new file mode 100644
index 000000000..0a77a299e
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_versioning.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+
+from ..controllers._api_router import APIRouter
+from ..controllers._rest_controller import RESTController
+from ..controllers._version import APIVersion
+from ..tests import ControllerTestCase
+
+
+@APIRouter("/vtest", secure=False)
+class VTest(RESTController):
+ RESOURCE_ID = "vid"
+
+ @RESTController.MethodMap(version=APIVersion(0, 1))
+ def list(self):
+ return {'version': ""}
+
+ def get(self):
+ return {'version': ""}
+
+ @RESTController.Collection('GET', version=APIVersion(1, 0))
+ def vmethod(self):
+ return {'version': '1.0'}
+
+ @RESTController.Collection('GET', version=APIVersion(1, 1))
+ def vmethodv1_1(self):
+ return {'version': '1.1'}
+
+ @RESTController.Collection('GET', version=APIVersion(2, 0))
+ def vmethodv2(self):
+ return {'version': '2.0'}
+
+
+class RESTVersioningTest(ControllerTestCase, unittest.TestCase):
+ @classmethod
+ def setup_server(cls):
+ cls.setup_controllers([VTest], "/test")
+
+ def test_list(self):
+ for (version, expected_status) in [
+ ((0, 1), 200),
+ ((2, 0), 415)
+ ]:
+ with self.subTest(version=version):
+ self._get('/test/api/vtest', version=APIVersion._make(version))
+ self.assertStatus(expected_status)
+
+ def test_v1(self):
+ for (version, expected_status) in [
+ ((1, 0), 200),
+ ((2, 0), 415)
+ ]:
+ with self.subTest(version=version):
+ self._get('/test/api/vtest/vmethod',
+ version=APIVersion._make(version))
+ self.assertStatus(expected_status)
+
+ def test_v2(self):
+ for (version, expected_status) in [
+ ((2, 0), 200),
+ ((1, 0), 415)
+ ]:
+ with self.subTest(version=version):
+ self._get('/test/api/vtest/vmethodv2',
+ version=APIVersion._make(version))
+ self.assertStatus(expected_status)
+
+ def test_backward_compatibility(self):
+ for (version, expected_status) in [
+ ((1, 1), 200),
+ ((1, 0), 200),
+ ((2, 0), 415)
+ ]:
+ with self.subTest(version=version):
+ self._get('/test/api/vtest/vmethodv1_1',
+ version=APIVersion._make(version))
+ self.assertStatus(expected_status)