Diffstat:
 doc/_ext/ceph_commands.py | 472
 doc/_ext/ceph_confval.py  | 459
 doc/_ext/ceph_releases.py | 351
 3 files changed, 1282 insertions(+), 0 deletions(-)
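
The three files added below are Sphinx extensions used by the Ceph documentation build. A rough sketch of how they might be wired up in doc/conf.py follows; the actual conf.py is not part of this diff, so the paths and values shown are illustrative assumptions:

    # conf.py (sketch): make doc/_ext importable, then enable the extensions
    import os
    import sys
    sys.path.insert(0, os.path.abspath('_ext'))

    extensions = [
        'ceph_commands',   # ceph-mgr-commands / ceph-mon-commands directives
        'ceph_confval',    # confval / confsec / mgr_module directives
        'ceph_releases',   # ceph_releases / ceph_timeline / gantt directives
    ]

    # config values read by ceph_confval.py's setup() (illustrative values)
    ceph_confval_imports = ['../src/common/options/global.yaml.in']
    ceph_confval_mgr_module_path = 'src/pybind/mgr'
    ceph_confval_mgr_python_path = 'src/python-common'
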
diff --git a/doc/_ext/ceph_commands.py b/doc/_ext/ceph_commands.py
new file mode 100644
index 000000000..861a013ea
--- /dev/null
+++ b/doc/_ext/ceph_commands.py
@@ -0,0 +1,472 @@
+import io
+import os
+import sys
+import contextlib
+
+from docutils.parsers.rst import directives
+from docutils.parsers.rst import Directive
+from jinja2 import Template
+from pcpp.preprocessor import Preprocessor
+from sphinx.util import logging
+from sphinx.util.console import bold
+from importlib import reload
+
+logger = logging.getLogger(__name__)
+
+
+class Flags:
+ NOFORWARD = (1 << 0)
+ OBSOLETE = (1 << 1)
+ DEPRECATED = (1 << 2)
+ MGR = (1 << 3)
+ POLL = (1 << 4)
+ HIDDEN = (1 << 5)
+
+ VALS = {
+ NOFORWARD: 'no_forward',
+ OBSOLETE: 'obsolete',
+ DEPRECATED: 'deprecated',
+ MGR: 'mgr',
+ POLL: 'poll',
+ HIDDEN: 'hidden',
+ }
+
+ def __init__(self, fs):
+ self.fs = fs
+
+ def __contains__(self, other):
+ return other in str(self)
+
+ def __str__(self):
+ keys = Flags.VALS.keys()
+ es = {Flags.VALS[k] for k in keys if self.fs & k == k}
+ return ', '.join(sorted(es))
+
+ def __bool__(self):
+ return bool(str(self))
+
+
+class CmdParam(object):
+ t = {
+ 'CephInt': 'int',
+ 'CephString': 'str',
+ 'CephChoices': 'str',
+ 'CephPgid': 'str',
+ 'CephOsdName': 'str',
+ 'CephPoolname': 'str',
+ 'CephObjectname': 'str',
+ 'CephUUID': 'str',
+ 'CephEntityAddr': 'str',
+ 'CephIPAddr': 'str',
+ 'CephName': 'str',
+ 'CephBool': 'bool',
+ 'CephFloat': 'float',
+ 'CephFilepath': 'str',
+ }
+
+ bash_example = {
+ 'CephInt': '1',
+ 'CephString': 'string',
+ 'CephChoices': 'choice',
+ 'CephPgid': '0',
+ 'CephOsdName': 'osd.0',
+ 'CephPoolname': 'poolname',
+ 'CephObjectname': 'objectname',
+ 'CephUUID': 'uuid',
+ 'CephEntityAddr': 'entityaddr',
+ 'CephIPAddr': '0.0.0.0',
+ 'CephName': 'name',
+ 'CephBool': 'true',
+ 'CephFloat': '0.0',
+ 'CephFilepath': '/path/to/file',
+ }
+
+ def __init__(self, type, name,
+ who=None, n=None, req=True, range=None, strings=None,
+ goodchars=None, positional=True):
+ self.type = type
+ self.name = name
+ self.who = who
+ self.n = n == 'N'
+ self.req = req != 'false'
+ self.range = range.split('|') if range else []
+ self.strings = strings.split('|') if strings else []
+ self.goodchars = goodchars
+ self.positional = positional != 'false'
+
+ assert who is None
+
+ def help(self):
+ advanced = []
+ if self.type != 'CephString':
+ advanced.append(self.type + ' ')
+ if self.range:
+ advanced.append('range= ``{}`` '.format('..'.join(self.range)))
+ if self.strings:
+ advanced.append('strings=({}) '.format(' '.join(self.strings)))
+ if self.goodchars:
+ advanced.append('goodchars= ``{}`` '.format(self.goodchars))
+ if self.n:
+ advanced.append('(can be repeated)')
+
+ advanced = advanced or ["(string)"]
+ return ' '.join(advanced)
+
+ def mk_example_value(self):
+ if self.type == 'CephChoices' and self.strings:
+ return self.strings[0]
+ if self.range:
+ return self.range[0]
+ return CmdParam.bash_example[self.type]
+
+ def mk_bash_example(self, simple):
+ val = self.mk_example_value()
+
+ if self.type == 'CephBool':
+ return '--' + self.name
+ if simple:
+ if self.type == "CephChoices" and self.strings:
+ return val
+ elif self.type == "CephString" and self.name != 'who':
+ return 'my_' + self.name
+ else:
+ return CmdParam.bash_example[self.type]
+ else:
+ return '--{}={}'.format(self.name, val)
+
+
+class CmdCommand(object):
+ def __init__(self, prefix, args, desc,
+ module=None, perm=None, flags=0, poll=None):
+ self.prefix = prefix
+ self.params = sorted([CmdParam(**arg) for arg in args],
+ key=lambda p: p.req, reverse=True)
+ self.help = desc
+ self.module = module
+ self.perm = perm
+ self.flags = Flags(flags)
+ self.needs_overload = False
+
+ def is_reasonably_simple(self):
+ if len(self.params) > 3:
+ return False
+ if any(p.n for p in self.params):
+ return False
+ return True
+
+ def mk_bash_example(self):
+ simple = self.is_reasonably_simple()
+ line = ' '.join(['ceph', self.prefix] + [p.mk_bash_example(simple) for p in self.params])
+ return line
+
+
+class Sig:
+ @staticmethod
+ def _parse_arg_desc(desc):
+ try:
+ return dict(kv.split('=', 1) for kv in desc.split(',') if kv)
+ except ValueError:
+ return desc
+
+ @staticmethod
+ def parse_cmd(cmd):
+ parsed = [Sig._parse_arg_desc(s) or s for s in cmd.split()]
+ prefix = [s for s in parsed if isinstance(s, str)]
+ params = [s for s in parsed if not isinstance(s, str)]
+ return ' '.join(prefix), params
+
+ @staticmethod
+ def parse_args(args):
+ return [Sig._parse_arg_desc(arg) for arg in args.split()]
+
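+# Sig.parse_cmd() splits a MonCommands.h-style signature into a command prefix
+# plus per-argument descriptors, e.g. (illustrative input):
+#
+#   Sig.parse_cmd('config set name=key,type=CephString name=value,type=CephString')
+#   # -> ('config set',
+#   #     [{'name': 'key', 'type': 'CephString'},
+#   #      {'name': 'value', 'type': 'CephString'}])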
+
+TEMPLATE = '''
+{%- set punct_char = '-' -%}
+{# add a header if we have multiple commands in this section #}
+{% if commands | length > 1 %}
+{{ section }}
+{{ section | length * '-' }}
+{# and demote the subsection #}
+{% set punct_char = '^' %}
+{% endif %}
+{% for command in commands %}
+{{ command.prefix }}
+{{ command.prefix | length * punct_char }}
+
+{{ command.help | wordwrap(70) }}
+
+:Example command:
+ .. code-block:: bash
+
+ {{ command.mk_bash_example() | wordwrap(70) | indent(9) }}
+
+{%- if command.params %}
+:Parameters:{% for param in command.params -%}
+{{" -" | indent(12, not loop.first) }} **{% if param.positional %}{{param.name}}{% else %}--{{param.name}}{% endif %}**: {{ param.help() }}
+{% endfor %}
+{% endif %}
+:Ceph Module: {{ command.module }}
+:Required Permissions: ``{{ command.perm }}``
+{%- if command.flags %}
+:Command Flags: ``{{ command.flags }}``
+{% endif %}
+{% endfor %}
+'''
+
+
+def group_by_prefix(commands):
+ last_prefix = None
+ grouped = []
+ for cmd in commands:
+ prefix = cmd.prefix.split(' ', 1)[0]
+ if prefix == last_prefix:
+ grouped.append(cmd)
+ elif last_prefix is None:
+ last_prefix = prefix
+ grouped = [cmd]
+ else:
+ yield last_prefix, grouped
+ last_prefix = prefix
+ grouped = [cmd]
+ assert grouped
+ yield last_prefix, grouped
+
+
+def render_commands(commands):
+ rendered = io.StringIO()
+ for section, grouped in group_by_prefix(commands):
+ logger.debug('rendering commands: %s: %d', section, len(grouped))
+ rendered.write(Template(TEMPLATE).render(
+ section=section,
+ commands=grouped))
+ return rendered.getvalue().split('\n')
+
+
+class CephMgrCommands(Directive):
+ """
+ Extract commands from the specified mgr modules.
+ """
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {'python_path': directives.unchanged}
+
+ def _normalize_path(self, dirname):
+ my_dir = os.path.dirname(os.path.realpath(__file__))
+ src_dir = os.path.abspath(os.path.join(my_dir, '../..'))
+ return os.path.join(src_dir, dirname)
+
+ def _is_mgr_module(self, dirname, name):
+ if not os.path.isdir(os.path.join(dirname, name)):
+ return False
+ if not os.path.isfile(os.path.join(dirname, name, '__init__.py')):
+ return False
+ return name not in ['tests']
+
+ @contextlib.contextmanager
+ def mocked_modules(self):
+ # src/pybind/mgr/tests
+ from tests import mock
+ mock_imports = ['rados',
+ 'rbd',
+ 'cephfs',
+ 'dateutil',
+ 'dateutil.parser']
+ # make dashboard happy
+ mock_imports += ['OpenSSL',
+ 'jwt',
+ 'bcrypt',
+ 'jsonpatch',
+ 'rook.rook_client',
+ 'rook.rook_client.ceph',
+ 'rook.rook_client._helper',
+ 'cherrypy=3.2.3']
+ # make diskprediction_local happy
+ mock_imports += ['numpy',
+ 'scipy']
+ # make restful happy
+ mock_imports += ['pecan',
+ 'pecan.rest',
+ 'pecan.hooks',
+ 'werkzeug',
+ 'werkzeug.serving']
+
+ for m in mock_imports:
+ args = {}
+ parts = m.split('=', 1)
+ mocked = parts[0]
+ if len(parts) > 1:
+ args['__version__'] = parts[1]
+ sys.modules[mocked] = mock.Mock(**args)
+
+ try:
+ yield
+ finally:
+ for m in mock_imports:
+ mocked = m.split('=', 1)[0]
+ sys.modules.pop(mocked)
+
+ def _collect_module_commands(self, name):
+ with self.mocked_modules():
+ logger.info(bold(f"loading mgr module '{name}'..."))
+ mgr_mod = __import__(name, globals(), locals(), [], 0)
+ reload(mgr_mod)
+ from tests import M
+
+ def subclass(x):
+ try:
+ return issubclass(x, M)
+ except TypeError:
+ return False
+ ms = [c for c in mgr_mod.__dict__.values()
+ if subclass(c) and 'Standby' not in c.__name__]
+ [m] = ms
+ assert isinstance(m.COMMANDS, list)
+ return m.COMMANDS
+
+ def _normalize_command(self, command):
+ if 'handler' in command:
+ del command['handler']
+ if 'cmd' in command:
+ command['prefix'], command['args'] = Sig.parse_cmd(command['cmd'])
+ del command['cmd']
+ else:
+ command['args'] = Sig.parse_args(command['args'])
+ command['flags'] = (1 << 3)
+ command['module'] = 'mgr'
+ return command
+
+ def _render_cmds(self, commands):
+ lines = render_commands(commands)
+ assert lines
+ lineno = self.lineno - self.state_machine.input_offset - 1
+ source = self.state_machine.input_lines.source(lineno)
+ self.state_machine.insert_input(lines, source)
+
+ def run(self):
+ module_path = self._normalize_path(self.arguments[0])
+ sys.path.insert(0, module_path)
+ for path in self.options.get('python_path', '').split(':'):
+ sys.path.insert(0, self._normalize_path(path))
+ os.environ['UNITTEST'] = 'true'
+ modules = [name for name in os.listdir(module_path)
+ if self._is_mgr_module(module_path, name)]
+ commands = sum([self._collect_module_commands(name) for name in modules], [])
+ cmds = [CmdCommand(**self._normalize_command(c)) for c in commands]
+ cmds = [cmd for cmd in cmds if 'hidden' not in cmd.flags]
+ cmds = sorted(cmds, key=lambda cmd: cmd.prefix)
+ self._render_cmds(cmds)
+
+ orig_rgw_mod = sys.modules['pybind_rgw_mod']
+ sys.modules['rgw'] = orig_rgw_mod
+
+ return []
+
+
+class MyProcessor(Preprocessor):
+ def __init__(self):
+ super().__init__()
+ self.cmds = []
+ self.undef('__DATE__')
+ self.undef('__TIME__')
+ self.expand_linemacro = False
+ self.expand_filemacro = False
+ self.expand_countermacro = False
+ self.line_directive = '#line'
+ self.define("__PCPP_VERSION__ " + '')
+ self.define("__PCPP_ALWAYS_FALSE__ 0")
+ self.define("__PCPP_ALWAYS_TRUE__ 1")
+
+ def eval(self, src):
+ _cmds = []
+
+ NONE = 0
+ NOFORWARD = (1 << 0)
+ OBSOLETE = (1 << 1)
+ DEPRECATED = (1 << 2)
+ MGR = (1 << 3)
+ POLL = (1 << 4)
+ HIDDEN = (1 << 5)
+ TELL = (1 << 6)
+
+ def FLAG(a):
+ return a
+
+ def COMMAND(cmd, desc, module, perm):
+ _cmds.append({
+ 'cmd': cmd,
+ 'desc': desc,
+ 'module': module,
+ 'perm': perm
+ })
+
+ def COMMAND_WITH_FLAG(cmd, desc, module, perm, flag):
+ _cmds.append({
+ 'cmd': cmd,
+ 'desc': desc,
+ 'module': module,
+ 'perm': perm,
+ 'flags': flag
+ })
+
+ self.parse(src)
+ out = io.StringIO()
+ self.write(out)
+ out.seek(0)
+ s = out.read()
+ exec(s, globals(), locals())
+ return _cmds
+
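+# MyProcessor.eval() runs the C preprocessor over a MonCommands.h-style header
+# and then exec()s the surviving COMMAND()/COMMAND_WITH_FLAG() macro calls as
+# Python, so an entry such as (illustrative)
+#
+#   COMMAND("pg stat", "show placement group status", "pg", "r")
+#
+# becomes {'cmd': 'pg stat', 'desc': 'show placement group status',
+# 'module': 'pg', 'perm': 'r'} in the returned list.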
+
+class CephMonCommands(Directive):
+ """
+ extracts commands from specified header file
+ """
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = True
+
+ def _src_dir(self):
+ my_dir = os.path.dirname(os.path.realpath(__file__))
+ return os.path.abspath(os.path.join(my_dir, '../..'))
+
+ def _parse_headers(self, headers):
+ src_dir = self._src_dir()
+ src = '\n'.join(f'#include "{src_dir}/{header}"' for header in headers)
+ return MyProcessor().eval(src)
+
+ def _normalize_command(self, command):
+ if 'handler' in command:
+ del command['handler']
+ command['prefix'], command['args'] = Sig.parse_cmd(command['cmd'])
+ del command['cmd']
+ return command
+
+ def _render_cmds(self, commands):
+ lines = render_commands(commands)
+ assert lines
+ lineno = self.lineno - self.state_machine.input_offset - 1
+ source = self.state_machine.input_lines.source(lineno)
+ self.state_machine.insert_input(lines, source)
+
+ def run(self):
+ headers = self.arguments[0].split()
+ commands = self._parse_headers(headers)
+ cmds = [CmdCommand(**self._normalize_command(c)) for c in commands]
+ cmds = [cmd for cmd in cmds if 'hidden' not in cmd.flags]
+ cmds = sorted(cmds, key=lambda cmd: cmd.prefix)
+ self._render_cmds(cmds)
+ return []
+
+
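+# Example usage in an .rst file; the paths are illustrative and are resolved
+# relative to the top of the source tree (two levels above doc/_ext):
+#
+#   .. ceph-mgr-commands:: src/pybind/mgr
+#      :python_path: src/python-common
+#
+#   .. ceph-mon-commands:: src/mon/MonCommands.h src/mgr/MgrCommands.h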
+def setup(app):
+ app.add_directive("ceph-mgr-commands", CephMgrCommands)
+ app.add_directive("ceph-mon-commands", CephMonCommands)
+
+ return {
+ 'version': '0.1',
+ 'parallel_read_safe': True,
+ 'parallel_write_safe': True,
+ }
diff --git a/doc/_ext/ceph_confval.py b/doc/_ext/ceph_confval.py
new file mode 100644
index 000000000..cde538b45
--- /dev/null
+++ b/doc/_ext/ceph_confval.py
@@ -0,0 +1,459 @@
+import io
+import contextlib
+import os
+import sys
+from typing import Any, Dict, List, Union
+
+from docutils.nodes import Node
+from docutils.parsers.rst import directives
+from docutils.statemachine import StringList
+
+from sphinx import addnodes
+from sphinx.directives import ObjectDescription
+from sphinx.domains.python import PyField
+from sphinx.environment import BuildEnvironment
+from sphinx.locale import _
+from sphinx.util import logging, status_iterator, ws_re
+from sphinx.util.docutils import switch_source_input, SphinxDirective
+from sphinx.util.docfields import Field
+from sphinx.util.nodes import make_id
+import jinja2
+import jinja2.filters
+import yaml
+
+logger = logging.getLogger(__name__)
+
+
+TEMPLATE = '''
+{% if desc %}
+ {{ desc | wordwrap(70) | indent(3) }}
+{% endif %}
+ :type: ``{{opt.type}}``
+{%- if default is not none %}
+ {%- if opt.type == 'size' %}
+ :default: ``{{ default | eval_size | iec_size }}``
+ {%- elif opt.type == 'secs' %}
+ :default: ``{{ default | readable_duration(opt.type) }}``
+ {%- elif opt.type in ('uint', 'int', 'float') %}
+ :default: ``{{ default | readable_num(opt.type) }}``
+ {%- elif opt.type == 'millisecs' %}
+ :default: ``{{ default }}`` milliseconds
+ {%- elif opt.type == 'bool' %}
+ :default: ``{{ default | string | lower }}``
+ {%- else %}
+ :default: {{ default | literal }}
+ {%- endif -%}
+{%- endif %}
+{%- if opt.enum_values %}
+ :valid choices:{% for enum_value in opt.enum_values -%}
+{{" -" | indent(18, not loop.first) }} {{ enum_value | literal }}
+{% endfor %}
+{%- endif %}
+{%- if opt.min is defined and opt.max is defined %}
+ :allowed range: ``[{{ opt.min }}, {{ opt.max }}]``
+{%- elif opt.min is defined %}
+ :min: ``{{ opt.min }}``
+{%- elif opt.max is defined %}
+ :max: ``{{ opt.max }}``
+{%- endif %}
+{%- if opt.constraint %}
+ :constraint: {{ opt.constraint }}
+{% endif %}
+{%- if opt.policies %}
+ :policies: {{ opt.policies }}
+{% endif %}
+{%- if opt.example %}
+ :example: {{ opt.example }}
+{%- endif %}
+{%- if opt.see_also %}
+ :see also: {{ opt.see_also | map('ref_confval') | join(', ') }}
+{%- endif %}
+{% if opt.note %}
+ .. note::
+ {{ opt.note }}
+{%- endif -%}
+{%- if opt.warning %}
+ .. warning::
+ {{ opt.warning }}
+{%- endif %}
+'''
+
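+# The template above renders one entry loaded from an options yaml file
+# (typically src/common/options/*.yaml.in in the Ceph tree; lines containing
+# '@' are skipped while loading). An illustrative sketch of one entry:
+#
+#   options:
+#   - name: osd_max_backfills
+#     type: uint
+#     desc: Maximum number of concurrent backfills per OSD
+#     default: 1
+#     see_also:
+#     - osd_recovery_max_active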
+
+def eval_size(value) -> int:
+ try:
+ return int(value)
+ except ValueError:
+ times = dict(_K=1 << 10,
+ _M=1 << 20,
+ _G=1 << 30,
+ _T=1 << 40)
+ for unit, m in times.items():
+ if value.endswith(unit):
+ return int(value[:-len(unit)]) * m
+ raise ValueError(f'unknown value: {value}')
+
+
+def readable_duration(value: str, typ: str) -> str:
+ try:
+ if typ == 'sec':
+ v = int(value)
+ postfix = 'second' if v == 1 else 'seconds'
+ return f'{v} {postfix}'
+ elif typ == 'float':
+ return str(float(value))
+ else:
+ return str(int(value))
+ except ValueError:
+ times = dict(_min=['minute', 'minutes'],
+ _hr=['hour', 'hours'],
+ _day=['day', 'days'])
+ for unit, readables in times.items():
+ if value.endswith(unit):
+ v = int(value[:-len(unit)])
+ postfix = readables[0 if v == 1 else 1]
+ return f'{v} {postfix}'
+ raise ValueError(f'unknown value: {value}')
+
+
+def do_plain_num(value: str, typ: str) -> str:
+ if typ == 'float':
+ return str(float(value))
+ else:
+ return str(int(value))
+
+
+def iec_size(value: int) -> str:
+ if value == 0:
+ return '0B'
+ units = dict(Ei=60,
+ Pi=50,
+ Ti=40,
+ Gi=30,
+ Mi=20,
+ Ki=10,
+ B=0)
+ for unit, bits in units.items():
+ m = 1 << bits
+ if value % m == 0:
+ value //= m
+ return f'{value}{unit}'
+ raise Exception(f'iec_size() failed to convert {value}')
+
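+# A few worked examples for the helpers above (inputs are illustrative):
+#
+#   eval_size('4_K')                    # -> 4096  (suffixes: _K, _M, _G, _T)
+#   iec_size(4096)                      # -> '4Ki'
+#   readable_duration('1_hr', 'secs')   # -> '1 hour'
+#   readable_duration('90', 'secs')     # -> '90'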
+
+def do_fileize_num(value: str, typ: str) -> str:
+ v = eval_size(value)
+ return iec_size(v)
+
+
+def readable_num(value: str, typ: str) -> str:
+ e = ValueError()
+ for eval_func in [do_plain_num,
+ readable_duration,
+ do_fileize_num]:
+ try:
+ return eval_func(value, typ)
+ except ValueError as ex:
+ e = ex
+ raise e
+
+
+def literal(name) -> str:
+ if name:
+ return f'``{name}``'
+ else:
+ return '<empty string>'
+
+
+def ref_confval(name) -> str:
+ return f':confval:`{name}`'
+
+
+def jinja_template() -> jinja2.Template:
+ env = jinja2.Environment()
+ env.filters['eval_size'] = eval_size
+ env.filters['iec_size'] = iec_size
+ env.filters['readable_duration'] = readable_duration
+ env.filters['readable_num'] = readable_num
+ env.filters['literal'] = literal
+ env.filters['ref_confval'] = ref_confval
+ return env.from_string(TEMPLATE)
+
+
+FieldValueT = Union[bool, float, int, str]
+
+
+class CephModule(SphinxDirective):
+ """
+ Directive to name the mgr module for which options are documented.
+ """
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+
+ def run(self) -> List[Node]:
+ module = self.arguments[0].strip()
+ if module == 'None':
+ self.env.ref_context.pop('ceph:module', None)
+ else:
+ self.env.ref_context['ceph:module'] = module
+ return []
+
+
+class CephOption(ObjectDescription):
+ """
+ Emit an option loaded from the given common/options/<name>.yaml.in file.
+ """
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {
+ 'module': directives.unchanged,
+ 'default': directives.unchanged
+ }
+
+ doc_field_types = [
+ Field('default',
+ label=_('Default'),
+ has_arg=False,
+ names=('default',)),
+ Field('type',
+ label=_('Type'),
+ has_arg=False,
+ names=('type',),
+ bodyrolename='class'),
+ ]
+
+ template = jinja_template()
+ opts: Dict[str, Dict[str, FieldValueT]] = {}
+ mgr_opts: Dict[str, # module name
+ Dict[str, # option name
+ Dict[str, # field_name
+ FieldValueT]]] = {}
+
+ def _load_yaml(self) -> Dict[str, Dict[str, FieldValueT]]:
+ if CephOption.opts:
+ return CephOption.opts
+ opts = []
+ for fn in status_iterator(self.config.ceph_confval_imports,
+ 'loading options...', 'red',
+ len(self.config.ceph_confval_imports),
+ self.env.app.verbosity):
+ self.env.note_dependency(fn)
+ try:
+ with open(fn, 'r') as f:
+ yaml_in = io.StringIO()
+ for line in f:
+ if '@' not in line:
+ yaml_in.write(line)
+ yaml_in.seek(0)
+ opts += yaml.safe_load(yaml_in)['options']
+ except OSError as e:
+ message = f'Unable to open option file "{fn}": {e}'
+ raise self.error(message)
+ CephOption.opts = dict((opt['name'], opt) for opt in opts)
+ return CephOption.opts
+
+ def _normalize_path(self, dirname):
+ my_dir = os.path.dirname(os.path.realpath(__file__))
+ src_dir = os.path.abspath(os.path.join(my_dir, '../..'))
+ return os.path.join(src_dir, dirname)
+
+ def _is_mgr_module(self, dirname, name):
+ if not os.path.isdir(os.path.join(dirname, name)):
+ return False
+ if not os.path.isfile(os.path.join(dirname, name, '__init__.py')):
+ return False
+ return name not in ['tests']
+
+ @contextlib.contextmanager
+ def mocked_modules(self):
+ # src/pybind/mgr/tests
+ from tests import mock
+ mock_imports = ['rados',
+ 'rbd',
+ 'cephfs',
+ 'dateutil',
+ 'dateutil.parser']
+ # make dashboard happy
+ mock_imports += ['OpenSSL',
+ 'jwt',
+ 'bcrypt',
+ 'jsonpatch',
+ 'rook.rook_client',
+ 'rook.rook_client.ceph',
+ 'rook.rook_client._helper',
+ 'cherrypy=3.2.3']
+ # make diskprediction_local happy
+ mock_imports += ['numpy',
+ 'scipy']
+ # make restful happy
+ mock_imports += ['pecan',
+ 'pecan.rest',
+ 'pecan.hooks',
+ 'werkzeug',
+ 'werkzeug.serving']
+
+ for m in mock_imports:
+ args = {}
+ parts = m.split('=', 1)
+ mocked = parts[0]
+ if len(parts) > 1:
+ args['__version__'] = parts[1]
+ sys.modules[mocked] = mock.Mock(**args)
+
+ try:
+ yield
+ finally:
+ for m in mock_imports:
+ mocked = m.split('=', 1)[0]
+ sys.modules.pop(mocked)
+
+ def _collect_options_from_module(self, name):
+ with self.mocked_modules():
+ mgr_mod = __import__(name, globals(), locals(), [], 0)
+ # import 'M' from src/pybind/mgr/tests
+ from tests import M
+
+ def subclass(x):
+ try:
+ return issubclass(x, M)
+ except TypeError:
+ return False
+ ms = [c for c in mgr_mod.__dict__.values()
+ if subclass(c) and 'Standby' not in c.__name__]
+ [m] = ms
+ assert isinstance(m.MODULE_OPTIONS, list)
+ return m.MODULE_OPTIONS
+
+ def _load_module(self, module) -> Dict[str, Dict[str, FieldValueT]]:
+ mgr_opts = CephOption.mgr_opts.get(module)
+ if mgr_opts is not None:
+ return mgr_opts
+ python_path = self.config.ceph_confval_mgr_python_path
+ for path in python_path.split(':'):
+ sys.path.insert(0, self._normalize_path(path))
+ module_path = self.env.config.ceph_confval_mgr_module_path
+ module_path = self._normalize_path(module_path)
+ sys.path.insert(0, module_path)
+ if not self._is_mgr_module(module_path, module):
+ raise self.error(f'module "{module}" not found under {module_path}')
+ fn = os.path.join(module_path, module, 'module.py')
+ if os.path.exists(fn):
+ self.env.note_dependency(fn)
+ os.environ['UNITTEST'] = 'true'
+ opts = self._collect_options_from_module(module)
+ CephOption.mgr_opts[module] = dict((opt['name'], opt) for opt in opts)
+ return CephOption.mgr_opts[module]
+
+ def _current_module(self) -> str:
+ return self.options.get('module',
+ self.env.ref_context.get('ceph:module'))
+
+ def _render_option(self, name) -> str:
+ cur_module = self._current_module()
+ if cur_module:
+ try:
+ opt = self._load_module(cur_module).get(name)
+ except Exception as e:
+ message = f'Unable to load module "{cur_module}": {e}'
+ raise self.error(message)
+ else:
+ opt = self._load_yaml().get(name)
+ if opt is None:
+ raise self.error(f'Option "{name}" not found!')
+ if cur_module and 'type' not in opt:
+ # the type of module option defaults to 'str'
+ opt['type'] = 'str'
+ desc = opt.get('fmt_desc') or opt.get('long_desc') or opt.get('desc')
+ opt_default = opt.get('default')
+ default = self.options.get('default', opt_default)
+ try:
+ return self.template.render(opt=opt,
+ desc=desc,
+ default=default)
+ except Exception as e:
+ message = (f'Unable to render option "{name}": {e}. '
+ f'opt={opt}, desc={desc}, default={default}')
+ raise self.error(message)
+
+ def handle_signature(self,
+ sig: str,
+ signode: addnodes.desc_signature) -> str:
+ signode.clear()
+ signode += addnodes.desc_name(sig, sig)
+ # normalize whitespace like XRefRole does
+ name = ws_re.sub(' ', sig)
+ cur_module = self._current_module()
+ if cur_module:
+ return '/'.join(['mgr', cur_module, name])
+ else:
+ return name
+
+ def transform_content(self, contentnode: addnodes.desc_content) -> None:
+ name = self.arguments[0]
+ source, lineno = self.get_source_info()
+ source = f'{source}:{lineno}:<confval>'
+ fields = StringList(self._render_option(name).splitlines() + [''],
+ source=source, parent_offset=lineno)
+ with switch_source_input(self.state, fields):
+ self.state.nested_parse(fields, 0, contentnode)
+
+ def add_target_and_index(self,
+ name: str,
+ sig: str,
+ signode: addnodes.desc_signature) -> None:
+ node_id = make_id(self.env, self.state.document, self.objtype, name)
+ signode['ids'].append(node_id)
+ self.state.document.note_explicit_target(signode)
+ entry = f'{name}; configuration option'
+ self.indexnode['entries'].append(('pair', entry, node_id, '', None))
+ std = self.env.get_domain('std')
+ std.note_object(self.objtype, name, node_id, location=signode)
+
+
+def _reset_ref_context(app, env, docname):
+ env.ref_context.pop('ceph:module', None)
+
+
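+# Example usage in an .rst file (option and module names are illustrative);
+# ``mgr_module`` sets the module context for the ``confval`` entries that
+# follow it, and ``mgr_module:: None`` clears it again:
+#
+#   .. confval:: osd_op_num_shards
+#
+#   .. mgr_module:: telemetry
+#   .. confval:: contact
+#
+#   .. mgr_module:: None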
+def setup(app) -> Dict[str, Any]:
+ app.add_config_value('ceph_confval_imports',
+ default=[],
+ rebuild='html',
+ types=[str])
+ app.add_config_value('ceph_confval_mgr_module_path',
+ default=[],
+ rebuild='html',
+ types=[str])
+ app.add_config_value('ceph_confval_mgr_python_path',
+ default=[],
+ rebuild='',
+ types=[str])
+ app.add_object_type(
+ 'confsec',
+ 'confsec',
+ objname='configuration section',
+ indextemplate='pair: %s; configuration section',
+ doc_field_types=[
+ Field(
+ 'example',
+ label=_('Example'),
+ has_arg=False,
+ )]
+ )
+ app.add_object_type(
+ 'confval',
+ 'confval',
+ objname='configuration option',
+ )
+ app.add_directive_to_domain('std', 'mgr_module', CephModule)
+ app.add_directive_to_domain('std', 'confval', CephOption, override=True)
+ app.connect('env-purge-doc', _reset_ref_context)
+
+ return {
+ 'version': 'builtin',
+ 'parallel_read_safe': True,
+ 'parallel_write_safe': True,
+ }
diff --git a/doc/_ext/ceph_releases.py b/doc/_ext/ceph_releases.py
new file mode 100644
index 000000000..94e92ffdd
--- /dev/null
+++ b/doc/_ext/ceph_releases.py
@@ -0,0 +1,351 @@
+# cobbled together from:
+# https://github.com/sphinx-contrib/documentedlist/blob/master/sphinxcontrib/documentedlist.py
+# https://github.com/sphinx-doc/sphinx/blob/v1.6.3/sphinx/ext/graphviz.py
+# https://github.com/thewtex/sphinx-contrib/blob/master/exceltable/sphinxcontrib/exceltable.py
+# https://bitbucket.org/prometheus/sphinxcontrib-htsql/src/331a542c29a102eec9f8cba44797e53a49de2a49/sphinxcontrib/htsql.py?at=default&fileviewer=file-view-default
+# into the glory that follows:
+import json
+import yaml
+import jinja2
+import sphinx
+import datetime
+from docutils.parsers.rst import Directive
+from docutils import nodes
+from sphinx.util import logging
+
+logger = logging.getLogger(__name__)
+
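+# Every directive in this file reads a releases.yml-style file; an illustrative
+# sketch of the layout the code expects (newest release first, dates parsed by
+# YAML as dates):
+#
+#   releases:
+#     reef:
+#       target_eol: 2025-08-01
+#       releases:
+#         - version: 18.2.1
+#           released: 2023-12-18
+#         - version: 18.2.0
+#           released: 2023-08-07
+#     quincy:
+#       actual_eol: 2025-01-13
+#       releases:
+#         - version: 17.2.7
+#           released: 2023-10-30
+#           skip_ref: true   # render as plain text, not a `version`_ reference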
+
+class CephReleases(Directive):
+ has_content = False
+ required_arguments = 2
+ optional_arguments = 0
+ option_spec = {}
+
+ def run(self):
+ filename = self.arguments[0]
+ current = self.arguments[1] == 'current'
+ document = self.state.document
+ env = document.settings.env
+ rel_filename, filename = env.relfn2path(filename)
+ env.note_dependency(filename)
+ try:
+ with open(filename, 'r') as fp:
+ releases = yaml.safe_load(fp)
+ releases = releases["releases"]
+ except Exception as e:
+ return [document.reporter.warning(
+ "Failed to open Ceph releases file {}: {}".format(filename, e),
+ line=self.lineno)]
+
+ table = nodes.table()
+ tgroup = nodes.tgroup(cols=4)
+ table += tgroup
+
+ tgroup.extend(
+ nodes.colspec(colwidth=30, colname='c'+str(idx))
+ for idx in range(4))
+
+ thead = nodes.thead()
+ tgroup += thead
+ row_node = nodes.row()
+ thead += row_node
+ row_node.extend(
+ nodes.entry(h, nodes.paragraph(text=h))
+ for h in ["Name", "Initial release", "Latest",
+ "End of life (estimated)" if current else "End of life"])
+
+ releases = releases.items()
+ releases = sorted(releases, key=lambda t: t[0], reverse=True)
+
+ tbody = nodes.tbody()
+ tgroup += tbody
+
+ rows = []
+ for code_name, info in releases:
+ actual_eol = info.get("actual_eol", None)
+
+ if current:
+ if actual_eol and actual_eol <= datetime.datetime.now().date():
+ continue
+ else:
+ if not actual_eol:
+ continue
+
+ trow = nodes.row()
+
+ entry = nodes.entry()
+ para = nodes.paragraph(text=f"`{code_name.title()} <{code_name}>`_")
+ sphinx.util.nodes.nested_parse_with_titles(
+ self.state, para, entry)
+ #entry += para
+ trow += entry
+
+ sorted_releases = sorted(info["releases"],
+ key=lambda t: [t["released"]] + [int(v) for v in t["version"].split(".")])
+ oldest_release = sorted_releases[0]
+ newest_release = sorted_releases[-1]
+
+ entry = nodes.entry()
+ para = nodes.paragraph(text="{}".format(
+ oldest_release["released"]))
+ entry += para
+ trow += entry
+
+ entry = nodes.entry()
+ if newest_release.get("skip_ref", False):
+ para = nodes.paragraph(text="{}".format(
+ newest_release["version"]))
+ else:
+ para = nodes.paragraph(text="`{}`_".format(
+ newest_release["version"]))
+ sphinx.util.nodes.nested_parse_with_titles(
+ self.state, para, entry)
+ #entry += para
+ trow += entry
+
+ entry = nodes.entry()
+ if current:
+ para = nodes.paragraph(text=info.get("target_eol", '--'))
+ else:
+ para = nodes.paragraph(text=info.get('actual_eol', '--'))
+ entry += para
+ trow += entry
+
+ rows.append(trow)
+
+ tbody.extend(rows)
+
+ return [table]
+
+
+RELEASES_TEMPLATE = '''
+.. mermaid::
+
+ gantt
+ dateFormat YYYY-MM-DD
+ axisFormat %Y
+ section Active Releases
+{% for release in active_releases %}
+ {{ release.code_name }} (latest {{ release.last_version }}): done, {{ release.debute_date }},{{ release.lifetime }}d
+{% endfor %}
+ section Archived Releases
+{% for release in archived_releases %}
+ {{ release.code_name }} (latest {{ release.last_version }}): done, {{ release.debute_date }},{{ release.lifetime }}d
+{% endfor %}
+'''
+
+
+class ReleasesGantt(Directive):
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+
+ template = jinja2.Environment().from_string(RELEASES_TEMPLATE)
+
+ def _render_time_line(self, filename):
+ try:
+ with open(filename) as f:
+ releases = yaml.safe_load(f)['releases']
+ except Exception as e:
+ message = f'Unable to read release file "{filename}": {e}'
+ raise self.error(message)
+
+ active_releases = []
+ archived_releases = []
+ # collect the fields the gantt template needs, split into active/archived
+ for code_name, info in releases.items():
+ last_release = info['releases'][0]
+ first_release = info['releases'][-1]
+ last_version = last_release['version']
+ debute_date = first_release['released']
+ if 'actual_eol' in info:
+ lifetime = info['actual_eol'] - first_release['released']
+ else:
+ lifetime = info['target_eol'] - first_release['released']
+ release = dict(code_name=code_name,
+ last_version=last_version,
+ debute_date=debute_date,
+ lifetime=lifetime.days)
+ if 'actual_eol' in info:
+ archived_releases.append(release)
+ else:
+ active_releases.append(release)
+ rendered = self.template.render(active_releases=active_releases,
+ archived_releases=archived_releases)
+ return rendered.splitlines()
+
+ def run(self):
+ filename = self.arguments[0]
+ document = self.state.document
+ env = document.settings.env
+ rel_filename, filename = env.relfn2path(filename)
+ env.note_dependency(filename)
+ lines = self._render_time_line(filename)
+ lineno = self.lineno - self.state_machine.input_offset - 1
+ source = self.state_machine.input_lines.source(lineno)
+ self.state_machine.insert_input(lines, source)
+ return []
+
+
+class CephTimeline(Directive):
+ has_content = False
+ required_arguments = 3
+ optional_arguments = 0
+ option_spec = {}
+
+ def run(self):
+ filename = self.arguments[0]
+ document = self.state.document
+ env = document.settings.env
+ rel_filename, filename = env.relfn2path(filename)
+ env.note_dependency(filename)
+ try:
+ with open(filename, 'r') as fp:
+ releases = yaml.safe_load(fp)
+ except Exception as e:
+ return [document.reporter.warning(
+ "Failed to open Ceph releases file {}: {}".format(filename, e),
+ line=self.lineno)]
+
+ display_releases = self.arguments[1:]
+
+ timeline = []
+ for code_name, info in releases["releases"].items():
+ if code_name in display_releases:
+ for release in info.get("releases", []):
+ released = release["released"]
+ timeline.append((released, code_name, release["version"],
+ release.get("skip_ref", False)))
+
+ assert "development" not in releases["releases"]
+ if "development" in display_releases:
+ for release in releases["development"]["releases"]:
+ released = release["released"]
+ timeline.append((released, "development", release["version"],
+ release.get("skip_ref", False)))
+
+ timeline = sorted(timeline, key=lambda t: t[0], reverse=True)
+
+ table = nodes.table()
+ tgroup = nodes.tgroup(cols=3)
+ table += tgroup
+
+ columns = ["Date"] + display_releases
+ tgroup.extend(
+ nodes.colspec(colwidth=30, colname='c'+str(idx))
+ for idx in range(len(columns)))
+
+ thead = nodes.thead()
+ tgroup += thead
+ row_node = nodes.row()
+ thead += row_node
+ for col in columns:
+ entry = nodes.entry()
+ if col.lower() in ["date", "development"]:
+ para = nodes.paragraph(text=col.title())
+ else:
+ para = nodes.paragraph(text=f"`{col.title()} <{col}>`_")
+ sphinx.util.nodes.nested_parse_with_titles(
+ self.state, para, entry)
+ row_node += entry
+
+ tbody = nodes.tbody()
+ tgroup += tbody
+
+ rows = []
+ for row_info in timeline:
+ trow = nodes.row()
+
+ entry = nodes.entry()
+ para = nodes.paragraph(text=row_info[0])
+ entry += para
+ trow += entry
+
+ for release in display_releases:
+ entry = nodes.entry()
+ if row_info[1] == release:
+ if row_info[3]: # if skip ref
+ para = nodes.paragraph(text=row_info[2])
+ else:
+ para = nodes.paragraph(text="`{}`_".format(row_info[2]))
+ sphinx.util.nodes.nested_parse_with_titles(
+ self.state, para, entry)
+ else:
+ para = nodes.paragraph(text="--")
+ entry += para
+ trow += entry
+ rows.append(trow)
+
+ tbody.extend(rows)
+
+ return [table]
+
+
+TIMELINE_TEMPLATE = '''
+.. mermaid::
+
+ gantt
+ dateFormat YYYY-MM-DD
+ axisFormat %Y-%m
+{% if title %}
+ title {{title}}
+{% endif %}
+{% for display_release in display_releases %}
+ section {{ display_release }}
+{%if releases[display_release].actual_eol %}
+ End of life: crit, {{ releases[display_release].actual_eol }},4d
+{% else %}
+ End of life (estimated): crit, {{ releases[display_release].target_eol }},4d
+{% endif %}
+{% for release in releases[display_release].releases | sort(attribute='released', reverse=True) %}
+ {{ release.version }}: milestone, done, {{ release.released }},0d
+{% endfor %}
+{% endfor %}
+'''
+
+
+class TimeLineGantt(Directive):
+ has_content = True
+ required_arguments = 2
+ optional_arguments = 0
+ final_argument_whitespace = True
+
+ template = jinja2.Environment().from_string(TIMELINE_TEMPLATE)
+
+ def _render_time_line(self, filename, display_releases):
+ try:
+ with open(filename) as f:
+ releases = yaml.safe_load(f)['releases']
+ except Exception as e:
+ message = f'Unable to read release file "{filename}": {e}'
+ raise self.error(message)
+
+ rendered = self.template.render(display_releases=display_releases,
+ releases=releases)
+ return rendered.splitlines()
+
+ def run(self):
+ filename = self.arguments[0]
+ display_releases = self.arguments[1].split()
+ document = self.state.document
+ env = document.settings.env
+ rel_filename, filename = env.relfn2path(filename)
+ env.note_dependency(filename)
+ lines = self._render_time_line(filename, display_releases)
+ lineno = self.lineno - self.state_machine.input_offset - 1
+ source = self.state_machine.input_lines.source(lineno)
+ self.state_machine.insert_input(lines, source)
+ return []
+
+
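+# Example usage in .rst files (yaml path and release names are illustrative):
+#
+#   .. ceph_releases:: releases.yml current
+#   .. ceph_releases:: releases.yml eol
+#
+#   .. ceph_releases_gantt:: releases.yml
+#
+#   .. ceph_timeline:: releases.yml reef quincy
+#
+#   .. ceph_timeline_gantt:: releases.yml reef quincy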
+def setup(app):
+ app.add_directive('ceph_releases', CephReleases)
+ app.add_directive('ceph_releases_gantt', ReleasesGantt)
+ app.add_directive('ceph_timeline', CephTimeline)
+ app.add_directive('ceph_timeline_gantt', TimeLineGantt)
+ return {
+ 'parallel_read_safe': True,
+ 'parallel_write_safe': True
+ }