path: root/tools/moztreedocs
Diffstat
-rw-r--r--  tools/moztreedocs/__init__.py                       232
-rw-r--r--  tools/moztreedocs/docs/adding-documentation.rst      30
-rw-r--r--  tools/moztreedocs/docs/index.rst                     23
-rw-r--r--  tools/moztreedocs/docs/jsdoc-support.rst             16
-rw-r--r--  tools/moztreedocs/docs/mdn-import.rst                28
-rw-r--r--  tools/moztreedocs/docs/mermaid-integration.rst       72
-rw-r--r--  tools/moztreedocs/docs/nested-docs.rst               14
-rw-r--r--  tools/moztreedocs/docs/redirect.rst                  11
-rw-r--r--  tools/moztreedocs/docs/rstlint.rst                   12
-rw-r--r--  tools/moztreedocs/docs/run-try-job.rst               25
-rw-r--r--  tools/moztreedocs/docs/server-synchronization.rst     5
-rw-r--r--  tools/moztreedocs/mach_commands.py                  447
-rw-r--r--  tools/moztreedocs/package.py                         31
-rw-r--r--  tools/moztreedocs/requirements.in                    57
-rw-r--r--  tools/moztreedocs/requirements.txt                  290
-rw-r--r--  tools/moztreedocs/upload.py                         175
16 files changed, 1468 insertions, 0 deletions
diff --git a/tools/moztreedocs/__init__.py b/tools/moztreedocs/__init__.py
new file mode 100644
index 0000000000..955f458ae2
--- /dev/null
+++ b/tools/moztreedocs/__init__.py
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals, print_function
+
+import os
+import tempfile
+import yaml
+
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.util import memoize
+from mozpack.copier import FileCopier
+from mozpack.files import FileFinder
+from mozpack.manifests import InstallManifest
+from pathlib import PurePath
+
+import frontmatter
+import sphinx
+import sphinx.ext.apidoc
+
+here = os.path.abspath(os.path.dirname(__file__))
+build = MozbuildObject.from_environment(cwd=here)
+
+MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, "docs"))
+
+logger = sphinx.util.logging.getLogger(__name__)
+
+
+@memoize
+def read_build_config(docdir):
+ """Read the active build config and return the relevant doc paths.
+
+ The return value is cached so re-generating with the same docdir won't
+ invoke the build system a second time."""
+ trees = {}
+ python_package_dirs = set()
+
+ is_main = docdir == MAIN_DOC_PATH
+ relevant_mozbuild_path = None if is_main else docdir
+
+ # Reading the Sphinx variables doesn't require a full build context.
+ # Only define the parts we need.
+ class fakeconfig(object):
+ topsrcdir = build.topsrcdir
+
+ variables = ("SPHINX_TREES", "SPHINX_PYTHON_PACKAGE_DIRS")
+ reader = BuildReader(fakeconfig())
+ result = reader.find_variables_from_ast(variables, path=relevant_mozbuild_path)
+ for path, name, key, value in result:
+ reldir = os.path.dirname(path)
+
+ if name == "SPHINX_TREES":
+ # If we're building a subtree, only process that specific subtree.
+ absdir = os.path.join(build.topsrcdir, reldir, value)
+ if not is_main and absdir not in (docdir, MAIN_DOC_PATH):
+ continue
+
+ assert key
+ if key.startswith("/"):
+ key = key[1:]
+ else:
+ key = os.path.normpath(os.path.join(reldir, key))
+
+ if key in trees:
+ raise Exception(
+ "%s has already been registered as a destination." % key
+ )
+ trees[key] = os.path.join(reldir, value)
+
+ if name == "SPHINX_PYTHON_PACKAGE_DIRS":
+ python_package_dirs.add(os.path.join(reldir, value))
+
+ return trees, python_package_dirs
+
+
+class _SphinxManager(object):
+ """Manages the generation of Sphinx documentation for the tree."""
+
+ def __init__(self, topsrcdir, main_path):
+ self.topsrcdir = topsrcdir
+ self.conf_py_path = os.path.join(main_path, "conf.py")
+ self.index_path = os.path.join(main_path, "index.rst")
+
+ # Instance variables that get set in self.generate_docs()
+ self.staging_dir = None
+ self.trees = None
+ self.python_package_dirs = None
+
+ def generate_docs(self, app):
+ """Generate/stage documentation."""
+ self.staging_dir = os.path.join(app.outdir, "_staging")
+
+ logger.info("Reading Sphinx metadata from build configuration")
+ self.trees, self.python_package_dirs = read_build_config(app.srcdir)
+
+ logger.info("Staging static documentation")
+ self._synchronize_docs(app)
+
+ logger.info("Generating Python API documentation")
+ self._generate_python_api_docs()
+
+ def _generate_python_api_docs(self):
+ """Generate Python API doc files."""
+ out_dir = os.path.join(self.staging_dir, "python")
+ base_args = ["--no-toc", "-o", out_dir]
+
+ for p in sorted(self.python_package_dirs):
+ full = os.path.join(self.topsrcdir, p)
+
+ finder = FileFinder(full)
+ dirs = {os.path.dirname(f[0]) for f in finder.find("**")}
+
+ test_dirs = {"test", "tests"}
+ excludes = {d for d in dirs if set(PurePath(d).parts) & test_dirs}
+
+ args = list(base_args)
+ args.append(full)
+ args.extend(excludes)
+
+ sphinx.ext.apidoc.main(argv=args)
+
+ def _process_markdown(self, m, markdown_file, dest):
+ """
+        When dealing with a markdown file, we check whether it has front
+        matter. If it does, we read that information, create a temporary
+        file, and reuse the front matter info as the header of the staged
+        md file.
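+
+        Illustrative example -- a markdown file beginning with::
+
+            ---
+            title: Foo
+            summary: About Foo.
+            ---
+            Body...
+
+        is staged as a copy that starts with::
+
+            Foo
+            ===
+            About Foo.
+            Body...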
+ """
+ with open(markdown_file, "r") as f:
+ # Load the front matter header
+ post = frontmatter.load(f)
+ if len(post.keys()) > 0:
+ # Has a front matter, use it
+ with tempfile.NamedTemporaryFile("w", delete=False) as fh:
+ # Use the frontmatter title
+ fh.write(post["title"] + "\n")
+ # Add the md syntax for the title
+ fh.write("=" * len(post["title"]) + "\n")
+ # If there is a summary, add it
+ if "summary" in post:
+ fh.write(post["summary"] + "\n")
+                # Write the body (frontmatter excludes the header)
+                fh.write(post.__str__())
+ fh.close()
+ # Instead of a symlink, we copy the file
+ m.add_copy(fh.name, dest)
+ else:
+ # No front matter, create the symlink like for rst
+            # as it will be the same file
+ m.add_link(markdown_file, dest)
+
+ def _synchronize_docs(self, app):
+ m = InstallManifest()
+
+ with open(os.path.join(MAIN_DOC_PATH, "config.yml"), "r") as fh:
+ tree_config = yaml.safe_load(fh)["categories"]
+
+ m.add_link(self.conf_py_path, "conf.py")
+
+ for dest, source in sorted(self.trees.items()):
+ source_dir = os.path.join(self.topsrcdir, source)
+ for root, _, files in os.walk(source_dir):
+ for f in files:
+ source_path = os.path.normpath(os.path.join(root, f))
+ rel_source = source_path[len(source_dir) + 1 :]
+ target = os.path.normpath(os.path.join(dest, rel_source))
+ if source_path.endswith(".md"):
+ self._process_markdown(
+ m, source_path, os.path.join(".", target)
+ )
+ else:
+ m.add_link(source_path, target)
+
+ copier = FileCopier()
+ m.populate_registry(copier)
+ copier.copy(self.staging_dir, remove_empty_directories=False)
+
+ with open(self.index_path, "r") as fh:
+ data = fh.read()
+
+ def is_toplevel(key):
+ """Whether the tree is nested under the toplevel index, or is
+ nested under another tree's index.
+ """
+ for k in self.trees:
+ if k == key:
+ continue
+ if key.startswith(k):
+ return False
+ return True
+
+ def format_paths(paths):
+ source_doc = ["%s/index" % p for p in paths]
+ return "\n ".join(source_doc)
+
+ toplevel_trees = {k: v for k, v in self.trees.items() if is_toplevel(k)}
+
+ CATEGORIES = {}
+ # generate the datastructure to deal with the tree
+ for t in tree_config:
+ CATEGORIES[t] = format_paths(tree_config[t])
+
+        # During livereload, we don't correctly rebuild the full document
+        # tree (Bug 1557020). The page is no longer referenced within the
+        # index tree, so we only check categorization when the complete
+        # tree is being rebuilt.
+ if app.srcdir == self.topsrcdir:
+ indexes = set(
+ [
+ os.path.normpath(os.path.join(p, "index"))
+ for p in toplevel_trees.keys()
+ ]
+ )
+ # Format categories like indexes
+ cats = "\n".join(CATEGORIES.values()).split("\n")
+ # Remove heading spaces
+ cats = [os.path.normpath(x.strip()) for x in cats]
+ indexes = tuple(set(indexes) - set(cats))
+ if indexes:
+ # In case a new doc isn't categorized
+ print(indexes)
+ raise Exception(
+ "Uncategorized documentation. Please add it in docs/config.yml"
+ )
+
+ data = data.format(**CATEGORIES)
+
+ with open(os.path.join(self.staging_dir, "index.rst"), "w") as fh:
+ fh.write(data)
+
+
+manager = _SphinxManager(build.topsrcdir, MAIN_DOC_PATH)
diff --git a/tools/moztreedocs/docs/adding-documentation.rst b/tools/moztreedocs/docs/adding-documentation.rst
new file mode 100644
index 0000000000..9abb6a2f84
--- /dev/null
+++ b/tools/moztreedocs/docs/adding-documentation.rst
@@ -0,0 +1,30 @@
+Adding Documentation
+--------------------
+
+To add new documentation, define the ``SPHINX_TREES`` and
+``SPHINX_PYTHON_PACKAGE_DIRS`` variables in ``moz.build`` files in
+the tree and documentation will automatically get picked up.
+
+Say you have a directory ``featureX`` you would like to write some
+documentation for. Here are the steps to create Sphinx documentation
+for it:
+
+1. Create a directory for the docs. This is typically ``docs``. e.g.
+ ``featureX/docs``.
+2. Create an ``index.rst`` file in this directory. The ``index.rst`` file
+ is the root documentation for that section. See ``build/docs/index.rst``
+ for an example file.
+3. In a ``moz.build`` file (typically the one in the parent directory of
+ the ``docs`` directory), define ``SPHINX_TREES`` to hook up the plumbing.
+   e.g. ``SPHINX_TREES['featureX'] = 'docs'``. This says *the ``docs``
+   directory under the current directory should be installed into the
+   Sphinx documentation tree under ``/featureX``* (see the sketch after
+   this list).
+4. If you have Python packages you would like to generate Python API
+ documentation for, you can use ``SPHINX_PYTHON_PACKAGE_DIRS`` to
+ declare directories containing Python packages. e.g.
+ ``SPHINX_PYTHON_PACKAGE_DIRS += ['mozpackage']``.
+5. In ``docs/config.yml``, define which category the doc
+   should go in.
+6. Verify the rst syntax using `./mach lint -l rst`_
+
+.. _./mach lint -l rst: /tools/lint/linters/rstlinter.html
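+
+As a sketch, the hookup from steps 3 and 4 lives in a ``moz.build`` file
+like this (``featureX`` and ``mozpackage`` are the illustrative names used
+above, not real components):
+
+.. code-block:: python
+
+    # featureX/moz.build
+    # Install featureX/docs under /featureX in the generated docs.
+    SPHINX_TREES["featureX"] = "docs"
+
+    # Generate Python API docs for this package directory as well.
+    SPHINX_PYTHON_PACKAGE_DIRS += ["mozpackage"]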
diff --git a/tools/moztreedocs/docs/index.rst b/tools/moztreedocs/docs/index.rst
new file mode 100644
index 0000000000..bb96a759de
--- /dev/null
+++ b/tools/moztreedocs/docs/index.rst
@@ -0,0 +1,23 @@
+Managing Documentation
+======================
+
+Documentation is hard. It's difficult to write, difficult to find and always out
+of date. That's why we implemented our in-tree documentation system that
+underpins firefox-source-docs.mozilla.org. The documentation lives next to the
+code that it documents, so it can be updated within the same commit that makes
+the underlying changes.
+
+This documentation is generated via the
+`Sphinx <http://sphinx-doc.org/>`_ tool from sources in the tree.
+
+To build the documentation, run ``mach doc``. Run
+``mach help doc`` to see configurable options.
+
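+For example, building the full tree or a single subtree (the path below is
+illustrative) works like this:
+
+.. code-block:: shell
+
+    # build everything and serve it locally
+    ./mach doc
+
+    # build one doc tree without opening a browser
+    ./mach doc tools/moztreedocs --no-open
+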
+The review group in Phabricator is ``#firefox-source-docs-reviewers``.
+
+.. toctree::
+ :caption: Documentation
+ :maxdepth: 2
+ :glob:
+
+ *
diff --git a/tools/moztreedocs/docs/jsdoc-support.rst b/tools/moztreedocs/docs/jsdoc-support.rst
new file mode 100644
index 0000000000..100fb92dac
--- /dev/null
+++ b/tools/moztreedocs/docs/jsdoc-support.rst
@@ -0,0 +1,16 @@
+jsdoc support
+=============
+
+Here is a quick example, for the public AddonManager :ref:`API <AddonManager Reference>`.
+
+To use it for your own code:
+
+#. Check that JSDoc generates the output you expect (for instance, you may
+   need to use a ``@class`` annotation on "object initializer"-style class
+   definitions)
+
+#. Create an ``.rst`` file, which may contain explanatory text as well as the
+   API docs. The minimum will look something like
+   `this <https://firefox-source-docs.mozilla.org/_sources/toolkit/mozapps/extensions/addon-manager/AddonManager.rst.txt>`__
+   (a bare-bones sketch follows below)
+
+#. Ensure your component is listed in ``js_source_path`` in the Sphinx
+   config: https://hg.mozilla.org/mozilla-central/file/72ee4800d415/tools/docs/conf.py#l46
+
+#. Run ``mach doc`` locally to generate the output and confirm that it looks correct.
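+
+A bare-bones sketch of such an ``.rst`` stub, assuming the sphinx-js
+directives (``AddonManager`` is just the example from above):
+
+.. code-block:: rst
+
+    AddonManager Reference
+    ======================
+
+    .. js:autoclass:: AddonManager
+       :members: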
diff --git a/tools/moztreedocs/docs/mdn-import.rst b/tools/moztreedocs/docs/mdn-import.rst
new file mode 100644
index 0000000000..9de78b6213
--- /dev/null
+++ b/tools/moztreedocs/docs/mdn-import.rst
@@ -0,0 +1,28 @@
+Importing documentation from MDN
+--------------------------------
+
+As MDN should not be used for documenting mozilla-central specific code or
+processes, such documentation should be migrated into this repository.
+
+Fortunately, there is an easy way to import docs from MDN
+into the Firefox source docs.
+
+1. Install https://pandoc.org/
+
+2. Add ``?raw=1`` at the end of the MDN URL
+
+3. Run pandoc the following way:
+
+.. code-block:: shell
+
+ $ pandoc -t rst https://wiki.developer.mozilla.org/docs/Web/JavaScript?raw\=1 > doc.rst
+
+4. Verify the rst syntax using `./mach lint -l rst`_
+
+.. _./mach lint -l rst: /tools/lint/linters/rstlinter.html
+
+5. If relevant, remove non-breaking spaces (rendered with a "!" on Phabricator)
+
+.. code-block:: shell
+
+ $ sed -i -e 's/\xc2\xa0/ /g' doc.rst
diff --git a/tools/moztreedocs/docs/mermaid-integration.rst b/tools/moztreedocs/docs/mermaid-integration.rst
new file mode 100644
index 0000000000..d56fb8e930
--- /dev/null
+++ b/tools/moztreedocs/docs/mermaid-integration.rst
@@ -0,0 +1,72 @@
+Mermaid Integration
+===================
+
+Mermaid is a tool that lets you generate flow charts, sequence diagrams, Gantt
+charts, class diagrams and git graphs from a simple markup language. This
+allows charts and diagrams to be embedded and edited directly in the
+documentation source files rather than creating them as images using some
+external tool and checking the images into the tree.
+
+To add a diagram, simply put something like this into your page:
+
+.. These two examples come from the upstream website (https://mermaid-js.github.io/mermaid/#/)
+
+.. code-block:: rst
+
+ .. mermaid::
+
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+
+The result will be:
+
+.. mermaid::
+
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+
+Or
+
+.. code-block:: rst
+
+ .. mermaid::
+
+ sequenceDiagram
+ participant Alice
+ participant Bob
+ Alice->>John: Hello John, how are you?
+ loop Healthcheck
+ John->>John: Fight against hypochondria
+ end
+ Note right of John: Rational thoughts <br/>prevail!
+ John-->>Alice: Great!
+ John->>Bob: How about you?
+ Bob-->>John: Jolly good!
+
+will show:
+
+.. mermaid::
+
+ sequenceDiagram
+ participant Alice
+ participant Bob
+ Alice->>John: Hello John, how are you?
+ loop Healthcheck
+ John->>John: Fight against hypochondria
+ end
+ Note right of John: Rational thoughts <br/>prevail!
+ John-->>Alice: Great!
+ John->>Bob: How about you?
+ Bob-->>John: Jolly good!
+
+
+See `Mermaid's official <https://mermaid-js.github.io/mermaid/#/>`__ docs for
+more details on the syntax, and use the
+`Mermaid Live Editor <https://mermaidjs.github.io/mermaid-live-editor/>`__ to
+experiment with creating your own diagrams.
diff --git a/tools/moztreedocs/docs/nested-docs.rst b/tools/moztreedocs/docs/nested-docs.rst
new file mode 100644
index 0000000000..e2eb03b42d
--- /dev/null
+++ b/tools/moztreedocs/docs/nested-docs.rst
@@ -0,0 +1,14 @@
+Nested Doc Trees
+================
+
+This feature essentially means we can now group related docs together under
+common "landing pages". This will allow us to refactor the docs into a structure that makes more sense. For example we could have a landing page for docs describing Gecko's internals, and another one for docs describing developer workflows in `mozilla-central`.
+
+
+To clarify a few things:
+
+#. The path specified in `SPHINX_TREES` does not need to correspond to a path in `mozilla-central`. For example, I could register my docs using `SPHINX_TREES["/foo"] = "docs"`, which would make that doc tree accessible at `firefox-source-docs.mozilla.org/foo` (see the sketch after this list).
+
+#. Any subtrees that are nested under another index will automatically be hidden from the main index. This means you should make sure to link to any subtrees from somewhere in the landing page. So given my earlier doc tree at `/foo`, if I now created a subtree and registered it using `SPHINX_TREES["/foo/bar"] = "docs"`, those docs would not show up in the main index.
+
+#. The relation between subtrees and their parents does not necessarily correspond to their layout on the file system. For example, a doc tree that lives under `/devtools` can be nested under an index that lives under `/browser`.
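+
+As a sketch (all paths here are the illustrative ones from the list above),
+the landing page and its hidden subtree are registered from two
+``moz.build`` files:
+
+.. code-block:: python
+
+    # Registers the landing page at firefox-source-docs.mozilla.org/foo.
+    SPHINX_TREES["/foo"] = "docs"
+
+    # Elsewhere in the tree: nested under /foo and hidden from the main
+    # index, so /foo's landing page should link to it explicitly.
+    SPHINX_TREES["/foo/bar"] = "docs"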
diff --git a/tools/moztreedocs/docs/redirect.rst b/tools/moztreedocs/docs/redirect.rst
new file mode 100644
index 0000000000..6ec29cdfd0
--- /dev/null
+++ b/tools/moztreedocs/docs/redirect.rst
@@ -0,0 +1,11 @@
+Redirects
+=========
+
+We now have the ability to define redirects in-tree! This allows us to
+refactor and move docs around to our heart's content without needing to worry
+about stale external URLs. To set up a redirect, simply add a line to this file under the ``redirects`` key:
+
+https://searchfox.org/mozilla-central/source/docs/config.yml
+
+Any request starting with the prefix on the left will be rewritten to the prefix on the right by the server. So for example a request to
+``/testing/marionette/marionette/index.html`` will be rewritten to ``/testing/marionette/index.html``. Amazon's API only supports prefix redirects, so anything more complex isn't supported. A sketch of such an entry follows.
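+
+A sketch of such an entry, reusing the example above (see ``docs/config.yml``
+for the real list):
+
+.. code-block:: yaml
+
+    redirects:
+        testing/marionette/marionette: testing/marionette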
diff --git a/tools/moztreedocs/docs/rstlint.rst b/tools/moztreedocs/docs/rstlint.rst
new file mode 100644
index 0000000000..230ba2e812
--- /dev/null
+++ b/tools/moztreedocs/docs/rstlint.rst
@@ -0,0 +1,12 @@
+ReStructuredText Linter
+-----------------------
+
+RST isn't the easiest of markup languages, but it's powerful and what `Sphinx` (the library used to build our docs) uses, so we're stuck with it. But at least we now have a linter which will catch basic problems in `.rst` files early. Be sure to run:
+
+.. code-block:: shell
+
+ mach lint -l rst
+
+to test your outgoing changes before submitting to review.
+
+See :ref:`RST Linter` for more information.
diff --git a/tools/moztreedocs/docs/run-try-job.rst b/tools/moztreedocs/docs/run-try-job.rst
new file mode 100644
index 0000000000..57f0aab570
--- /dev/null
+++ b/tools/moztreedocs/docs/run-try-job.rst
@@ -0,0 +1,25 @@
+Running a try job for Documentation
+-----------------------------------
+
+Documentation has two try jobs associated with it:
+
+ - ``doc-generate`` - This generates the documentation with the committed changes on the try server and gives the same output as if it had landed on a regular integration branch.
+
+ .. code-block:: shell
+
+ mach try fuzzy -q "'doc-generate"
+
+ - ``doc-upload`` - This uploads documentation to `gecko-l1 bucket <http://gecko-docs.mozilla.org-l1.s3.us-west-2.amazonaws.com/index.html>`__ with the committed changes.
+
+ .. code-block:: shell
+
+ mach try fuzzy -q "'doc-upload"
+
+.. important::
+
+    Running try jobs requires the user to have try server access.
+
+.. note::
+
+    To learn more about setting up the try server or
+    using a different selector, head over to the :ref:`try server documentation <Try Server>`.
diff --git a/tools/moztreedocs/docs/server-synchronization.rst b/tools/moztreedocs/docs/server-synchronization.rst
new file mode 100644
index 0000000000..b47b66503e
--- /dev/null
+++ b/tools/moztreedocs/docs/server-synchronization.rst
@@ -0,0 +1,5 @@
+Server Synchronization
+======================
+
+We now compare all the files that exist on the server against the list of source files in `mozilla-central`.
+Any files on the server that no longer exist in `mozilla-central` are removed.
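+
+In pseudocode (the real logic lives in ``s3_delete_missing`` in
+``tools/moztreedocs/upload.py``):
+
+.. code-block:: python
+
+    # Keys on the server that no longer correspond to an uploaded file.
+    stale = [key for key in keys_on_server if key not in uploaded_keys]
+    delete_in_batches(stale)  # batched s3.delete_objects() calls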
diff --git a/tools/moztreedocs/mach_commands.py b/tools/moztreedocs/mach_commands.py
new file mode 100644
index 0000000000..4309f583f3
--- /dev/null
+++ b/tools/moztreedocs/mach_commands.py
@@ -0,0 +1,447 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+
+import fnmatch
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import time
+import yaml
+import uuid
+
+from functools import partial
+from pprint import pprint
+
+from mach.registrar import Registrar
+from mozbuild.base import MachCommandBase
+from mach.decorators import (
+ Command,
+ CommandArgument,
+ CommandProvider,
+ SubCommand,
+)
+
+here = os.path.abspath(os.path.dirname(__file__))
+topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))
+DOC_ROOT = os.path.join(topsrcdir, "docs")
+BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
+JSDOC_NOT_FOUND = """\
+JSDoc==3.5.5 is required to build the docs but was not found on your system.
+Please install it globally by running:
+
+ $ mach npm install -g jsdoc@3.5.5
+
+Bug 1498604 tracks bootstrapping jsdoc properly.
+Bug 1556460 tracks supporting newer versions of jsdoc.
+"""
+
+
+@CommandProvider
+class Documentation(MachCommandBase):
+ """Helps manage in-tree documentation."""
+
+ def __init__(self, *args, **kwargs):
+ super(Documentation, self).__init__(*args, **kwargs)
+
+ self._manager = None
+ self._project = None
+ self._version = None
+
+ @Command(
+ "doc",
+ category="devenv",
+ virtualenv_name="docs",
+ description="Generate and serve documentation from the tree.",
+ )
+ @CommandArgument(
+ "path",
+ default=None,
+ metavar="DIRECTORY",
+ nargs="?",
+ help="Path to documentation to build and display.",
+ )
+ @CommandArgument(
+ "--format", default="html", dest="fmt", help="Documentation format to write."
+ )
+ @CommandArgument(
+ "--outdir", default=None, metavar="DESTINATION", help="Where to write output."
+ )
+ @CommandArgument(
+ "--archive",
+ action="store_true",
+ help="Write a gzipped tarball of generated docs.",
+ )
+ @CommandArgument(
+ "--no-open",
+ dest="auto_open",
+ default=True,
+ action="store_false",
+ help="Don't automatically open HTML docs in a browser.",
+ )
+ @CommandArgument(
+ "--no-serve",
+ dest="serve",
+ default=True,
+ action="store_false",
+ help="Don't serve the generated docs after building.",
+ )
+ @CommandArgument(
+ "--http",
+ default="localhost:5500",
+ metavar="ADDRESS",
+ help="Serve documentation on the specified host and port, "
+ 'default "localhost:5500".',
+ )
+ @CommandArgument(
+ "--upload", action="store_true", help="Upload generated files to S3."
+ )
+ @CommandArgument(
+ "-j",
+ "--jobs",
+ default=str(multiprocessing.cpu_count()),
+ dest="jobs",
+ help="Distribute the build over N processes in parallel.",
+ )
+ @CommandArgument(
+ "--write-url", default=None, help="Write S3 Upload URL to text file"
+ )
+ @CommandArgument(
+ "--verbose", action="store_true", help="Run Sphinx in verbose mode"
+ )
+ def build_docs(
+ self,
+ path=None,
+ fmt="html",
+ outdir=None,
+ auto_open=True,
+ serve=True,
+ http=None,
+ archive=False,
+ upload=False,
+ jobs=None,
+ write_url=None,
+ verbose=None,
+ ):
+ if self.check_jsdoc():
+ return die(JSDOC_NOT_FOUND)
+
+ self.activate_virtualenv()
+ self.virtualenv_manager.install_pip_requirements(
+ os.path.join(here, "requirements.txt")
+ )
+
+ import webbrowser
+ from livereload import Server
+ from moztreedocs.package import create_tarball
+
+ unique_id = "%s/%s" % (self.project, str(uuid.uuid1()))
+
+ outdir = outdir or os.path.join(self.topobjdir, "docs")
+ savedir = os.path.join(outdir, fmt)
+
+ path = path or self.topsrcdir
+ path = os.path.normpath(os.path.abspath(path))
+
+ docdir = self._find_doc_dir(path)
+ if not docdir:
+ print(self._dump_sphinx_backtrace())
+ return die(
+ "failed to generate documentation:\n"
+ "%s: could not find docs at this location" % path
+ )
+
+ result = self._run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs, verbose=verbose)
+ if result != 0:
+ print(self._dump_sphinx_backtrace())
+ return die(
+ "failed to generate documentation:\n"
+ "%s: sphinx return code %d" % (path, result)
+ )
+ else:
+ print("\nGenerated documentation:\n%s" % savedir)
+
+ print("Post processing HTML files")
+ self._post_process_html(savedir)
+
+ # Upload the artifact containing the link to S3
+ # This would be used by code-review to post the link to Phabricator
+ if write_url is not None:
+ unique_link = BASE_LINK + unique_id + "/index.html"
+ with open(write_url, "w") as fp:
+ fp.write(unique_link)
+ fp.flush()
+ print("Generated " + write_url)
+
+ if archive:
+ archive_path = os.path.join(outdir, "%s.tar.gz" % self.project)
+ create_tarball(archive_path, savedir)
+ print("Archived to %s" % archive_path)
+
+ if upload:
+ self._s3_upload(savedir, self.project, unique_id, self.version)
+
+ if not serve:
+ index_path = os.path.join(savedir, "index.html")
+ if auto_open and os.path.isfile(index_path):
+ webbrowser.open(index_path)
+ return
+
+ # Create livereload server. Any files modified in the specified docdir
+ # will cause a re-build and refresh of the browser (if open).
+ try:
+ host, port = http.split(":", 1)
+ port = int(port)
+ except ValueError:
+ return die("invalid address: %s" % http)
+
+ server = Server()
+
+ sphinx_trees = self.manager.trees or {savedir: docdir}
+ for _, src in sphinx_trees.items():
+ run_sphinx = partial(
+ self._run_sphinx, src, savedir, fmt=fmt, jobs=jobs, verbose=verbose
+ )
+ server.watch(src, run_sphinx)
+ server.serve(
+ host=host,
+ port=port,
+ root=savedir,
+ open_url_delay=0.1 if auto_open else None,
+ )
+
+ def _dump_sphinx_backtrace(self):
+ """
+        If Sphinx left a crash dump file (sphinx-err-*), read and
+        return its content, since it isn't displayed by default.
+ """
+ pattern = "sphinx-err-*"
+ output = ""
+ tmpdir = "/tmp"
+
+ if not os.path.isdir(tmpdir):
+            # No /tmp directory (e.g. on Windows); nothing to collect
+ return
+ files = os.listdir(tmpdir)
+ for name in files:
+ if fnmatch.fnmatch(name, pattern):
+ pathFile = os.path.join(tmpdir, name)
+ stat = os.stat(pathFile)
+ output += "Name: {0} / Creation date: {1}\n".format(
+ pathFile, time.ctime(stat.st_mtime)
+ )
+ with open(pathFile) as f:
+ output += f.read()
+ return output
+
+ def _run_sphinx(
+ self, docdir, savedir, config=None, fmt="html", jobs=None, verbose=None
+ ):
+ import sphinx.cmd.build
+
+ config = config or self.manager.conf_py_path
+ args = [
+ "-T",
+ "-b",
+ fmt,
+ "-c",
+ os.path.dirname(config),
+ docdir,
+ savedir,
+ ]
+ if jobs:
+ args.extend(["-j", jobs])
+ if verbose:
+ args.extend(["-v", "-v"])
+ print("Run sphinx with:")
+ print(args)
+ return sphinx.cmd.build.build_main(args)
+
+ def _post_process_html(self, savedir):
+ """
+        Post-process the generated HTML to fix some resource URLs
+ """
+ MERMAID_VERSION = "8.4.4"
+ for root, _, files in os.walk(savedir):
+ for file in files:
+ if file.endswith(".html"):
+ p = os.path.join(root, file)
+
+ with open(p, "r") as file:
+ filedata = file.read()
+
+ # Workaround https://bugzilla.mozilla.org/show_bug.cgi?id=1607143
+ # to avoid a CSP error
+ # This method should be removed once
+ # https://github.com/mgaitan/sphinxcontrib-mermaid/pull/37 is merged
+ # As sphinx-mermaid currently uses an old version, also force
+ # a more recent version
+ filedata = re.sub(
+ r"https://unpkg.com/mermaid@.*/dist",
+ r"https://cdnjs.cloudflare.com/ajax/libs/mermaid/{}".format(
+ MERMAID_VERSION
+ ),
+ filedata,
+ )
+
+ with open(p, "w") as file:
+ file.write(filedata)
+
+ @property
+ def manager(self):
+ if not self._manager:
+ from moztreedocs import manager
+
+ self._manager = manager
+ return self._manager
+
+ def _read_project_properties(self):
+ import imp
+
+ path = os.path.normpath(self.manager.conf_py_path)
+ with open(path, "r") as fh:
+ conf = imp.load_module("doc_conf", fh, path, (".py", "r", imp.PY_SOURCE))
+
+ # Prefer the Mozilla project name, falling back to Sphinx's
+ # default variable if it isn't defined.
+ project = getattr(conf, "moz_project_name", None)
+ if not project:
+ project = conf.project.replace(" ", "_")
+
+ self._project = project
+ self._version = getattr(conf, "version", None)
+
+ @property
+ def project(self):
+ if not self._project:
+ self._read_project_properties()
+ return self._project
+
+ @property
+ def version(self):
+ if not self._version:
+ self._read_project_properties()
+ return self._version
+
+ def _find_doc_dir(self, path):
+ if os.path.isfile(path):
+ return
+
+ valid_doc_dirs = ("doc", "docs")
+ if os.path.basename(path) in valid_doc_dirs:
+ return path
+
+ for d in valid_doc_dirs:
+ p = os.path.join(path, d)
+ if os.path.isdir(p):
+ return p
+
+ def _s3_upload(self, root, project, unique_id, version=None):
+ from moztreedocs.package import distribution_files
+ from moztreedocs.upload import s3_upload, s3_set_redirects
+
+ # Workaround the issue
+ # BlockingIOError: [Errno 11] write could not complete without blocking
+ # https://github.com/travis-ci/travis-ci/issues/8920
+ import fcntl
+
+ fcntl.fcntl(1, fcntl.F_SETFL, 0)
+
+        # Files are uploaded to multiple locations:
+        #
+        # <project>/<version>
+        # <unique_id> (used for review links)
+        # the bucket root (for the "main" project only)
+        #
+        # This allows multiple projects and versions to be stored in the
+        # S3 bucket.
+
+ files = list(distribution_files(root))
+ key_prefixes = []
+ if version:
+ key_prefixes.append("%s/%s" % (project, version))
+
+ # Until we redirect / to main/latest, upload the main docs
+ # to the root.
+ if project == "main":
+ key_prefixes.append("")
+
+ key_prefixes.append(unique_id)
+
+ with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
+ redirects = yaml.safe_load(fh)["redirects"]
+
+ redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}
+
+ all_redirects = {}
+
+ for prefix in key_prefixes:
+ s3_upload(files, prefix)
+
+            # Don't set up redirects for the "version" or "uuid" prefixes since
+ # we are exceeding a 50 redirect limit and external things are
+ # unlikely to link there anyway (see bug 1614908).
+ if (version and prefix.endswith(version)) or prefix == unique_id:
+ continue
+
+ if prefix:
+ prefix += "/"
+ all_redirects.update({prefix + k: prefix + v for k, v in redirects.items()})
+
+ print("Redirects currently staged")
+ pprint(all_redirects, indent=1)
+
+ s3_set_redirects(all_redirects)
+
+ unique_link = BASE_LINK + unique_id + "/index.html"
+ print("Uploaded documentation can be accessed here " + unique_link)
+
+ @SubCommand(
+ "doc",
+ "mach-telemetry",
+ description="Generate documentation from Glean metrics.yaml files",
+ )
+ def generate_telemetry_docs(self):
+ args = [
+ "glean_parser",
+ "translate",
+ "-f",
+ "markdown",
+ "-o",
+ os.path.join(topsrcdir, "python/mach/docs/"),
+ os.path.join(topsrcdir, "python/mach/pings.yaml"),
+ os.path.join(topsrcdir, "python/mach/metrics.yaml"),
+ ]
+ metrics_paths = [
+ handler.metrics_path
+ for handler in Registrar.command_handlers.values()
+ if handler.metrics_path is not None
+ ]
+ args.extend([os.path.join(self.topsrcdir, path) for path in set(metrics_paths)])
+ subprocess.check_output(args)
+
+ def check_jsdoc(self):
+ try:
+ from mozfile import which
+
+ exe_name = which("jsdoc")
+ if not exe_name:
+ return 1
+ out = subprocess.check_output([exe_name, "--version"])
+ version = out.split()[1]
+ except subprocess.CalledProcessError:
+ version = None
+
+ if not version or not version.startswith(b"3.5"):
+ return 1
+
+
+def die(msg, exit_code=1):
+ msg = "%s: %s" % (sys.argv[0], msg)
+ print(msg, file=sys.stderr)
+ return exit_code
diff --git a/tools/moztreedocs/package.py b/tools/moztreedocs/package.py
new file mode 100644
index 0000000000..9b4b20e229
--- /dev/null
+++ b/tools/moztreedocs/package.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.files import FileFinder
+
+
+def distribution_files(root):
+ """Find all files suitable for distributing.
+
+ Given the path to generated Sphinx documentation, returns an iterable
+ of (path, BaseFile) for files that should be archived, uploaded, etc.
+ Paths are relative to given root directory.
+ """
+ finder = FileFinder(root, ignore=("_staging", "_venv"))
+ return finder.find("**")
+
+
+def create_tarball(filename, root):
+ """Create a tar.gz archive of docs in a directory."""
+ files = dict(distribution_files(root))
+
+ with open(filename, "wb") as fh:
+ create_tar_gz_from_files(
+ fh, files, filename=os.path.basename(filename), compresslevel=6
+ )
diff --git a/tools/moztreedocs/requirements.in b/tools/moztreedocs/requirements.in
new file mode 100644
index 0000000000..d433e86b34
--- /dev/null
+++ b/tools/moztreedocs/requirements.in
@@ -0,0 +1,57 @@
+# pip freeze > requirements.in
+alabaster==0.7.12
+Babel==2.8.0
+backports-abc==0.5
+boto3==1.15.6
+botocore==1.18.6
+certifi==2020.6.20
+chardet==3.0.4
+commonmark==0.9.1
+docutils==0.16
+fluent.pygments==1.0
+fluent.syntax==0.18.1
+idna==2.10
+imagesize==1.2.0
+importlib-metadata==2.0.0
+Jinja2==2.11.2
+jmespath==0.10.0
+jsonschema==3.2.0
+livereload==2.6.3
+Markdown==3.3.3
+MarkupSafe==1.1.1
+mock==4.0.2
+packaging==20.4
+parsimonious==0.7.0
+pyasn1==0.4.8
+pyasn1-modules==0.2.8
+Pygments==2.7.1
+pyparsing==2.4.7
+python-dateutil==2.8.1
+python-frontmatter==0.5.0
+pytz==2020.1
+PyYAML==5.3.1
+recommonmark==0.7.1
+redo==2.0.3
+requests==2.24.0
+rsa==4.6
+s3transfer==0.3.3
+scandir==1.10.0
+sentry-sdk==0.14.3
+singledispatch==3.4.0.3
+six==1.15.0
+snowballstemmer==2.0.0
+Sphinx==3.4.2
+sphinx-copybutton==0.3.1
+sphinx-js==3.0.1
+sphinx-markdown-tables==0.0.15
+sphinx-rtd-theme==0.5.1
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==1.0.3
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-mermaid==0.5.0
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.4
+tornado==6.0.4
+urllib3==1.25.10
+zipp==3.2.0
diff --git a/tools/moztreedocs/requirements.txt b/tools/moztreedocs/requirements.txt
new file mode 100644
index 0000000000..b816ffbc27
--- /dev/null
+++ b/tools/moztreedocs/requirements.txt
@@ -0,0 +1,290 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes requirements.in
+#
+alabaster==0.7.12 \
+ --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
+ --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 \
+ # via -r requirements.in, sphinx
+attrs==20.2.0 \
+ --hash=sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594 \
+ --hash=sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc \
+ # via jsonschema
+babel==2.8.0 \
+ --hash=sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38 \
+ --hash=sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4 \
+ # via -r requirements.in, sphinx
+backports-abc==0.5 \
+ --hash=sha256:033be54514a03e255df75c5aee8f9e672f663f93abb723444caec8fe43437bde \
+ --hash=sha256:52089f97fe7a9aa0d3277b220c1d730a85aefd64e1b2664696fe35317c5470a7 \
+ # via -r requirements.in
+boto3==1.15.6 \
+ --hash=sha256:87534080a5addad135fcd631fa8b57a12e1a234c23d86521e84fbbd9217fd6a5 \
+ --hash=sha256:c4c84c6647e84a9f270d86da7eea1a250c2529e26ddb39320546f235327f10e6 \
+ # via -r requirements.in
+botocore==1.18.6 \
+ --hash=sha256:31f04b68a6ebe8cfa97b4d70f54f29aef8b6a0bc9c4da7b8ee9b6a53fc69edae \
+ --hash=sha256:3de32a03679bb172a41c38e3c9af3f7259f3637f705aa2ac384b3233dc985b85 \
+ # via -r requirements.in, boto3, s3transfer
+certifi==2020.6.20 \
+ --hash=sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3 \
+ --hash=sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41 \
+ # via -r requirements.in, requests, sentry-sdk
+chardet==3.0.4 \
+ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
+ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
+ # via -r requirements.in, requests
+commonmark==0.9.1 \
+ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
+ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 \
+ # via -r requirements.in, recommonmark
+docutils==0.16 \
+ --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
+ --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
+ # via -r requirements.in, recommonmark, sphinx, sphinx-js
+fluent.pygments==1.0 \
+ --hash=sha256:625c87a8a2362ef304146b161d359dcf652bed2a1ae4869b5607b8e06d117d97 \
+ --hash=sha256:b44758f74f87e1aa9d78d8f53363962639c5bf99d88cf3e407d046b5249ec27f \
+ # via -r requirements.in
+fluent.syntax==0.18.1 \
+ --hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
+ --hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f \
+ # via -r requirements.in, fluent.pygments
+idna==2.10 \
+ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \
+ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \
+ # via -r requirements.in, requests
+imagesize==1.2.0 \
+ --hash=sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \
+ --hash=sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1 \
+ # via -r requirements.in, sphinx
+importlib-metadata==2.0.0 \
+ --hash=sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da \
+ --hash=sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3 \
+ # via -r requirements.in
+jinja2==2.11.2 \
+ --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
+ --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
+ # via -r requirements.in, sphinx, sphinx-js
+jmespath==0.10.0 \
+ --hash=sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9 \
+ --hash=sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f \
+ # via -r requirements.in, boto3, botocore
+jsonschema==3.2.0 \
+ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
+ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a \
+ # via -r requirements.in
+livereload==2.6.3 \
+ --hash=sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869 \
+ # via -r requirements.in
+markdown==3.3.3 \
+ --hash=sha256:5d9f2b5ca24bc4c7a390d22323ca4bad200368612b5aaa7796babf971d2b2f18 \
+ --hash=sha256:c109c15b7dc20a9ac454c9e6025927d44460b85bd039da028d85e2b6d0bcc328 \
+ # via -r requirements.in, sphinx-markdown-tables
+markupsafe==1.1.1 \
+ --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
+ --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
+ --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
+ --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
+ --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
+ --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
+ --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
+ --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
+ --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
+ --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
+ --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
+ --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
+ --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
+ --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
+ --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
+ --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
+ --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
+ --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
+ --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
+ --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
+ --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
+ --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
+ --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
+ --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
+ --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
+ --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
+ --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
+ --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
+ --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
+ --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
+ --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
+ --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
+ --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
+ # via -r requirements.in, jinja2
+mock==4.0.2 \
+ --hash=sha256:3f9b2c0196c60d21838f307f5825a7b86b678cedc58ab9e50a8988187b4d81e0 \
+ --hash=sha256:dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72 \
+ # via -r requirements.in
+packaging==20.4 \
+ --hash=sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8 \
+ --hash=sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181 \
+ # via -r requirements.in, sphinx
+parsimonious==0.7.0 \
+ --hash=sha256:396d424f64f834f9463e81ba79a331661507a21f1ed7b644f7f6a744006fd938 \
+ # via -r requirements.in, sphinx-js
+pyasn1-modules==0.2.8 \
+ --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
+ --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \
+ # via -r requirements.in
+pyasn1==0.4.8 \
+ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
+ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba \
+ # via -r requirements.in, pyasn1-modules, rsa
+pygments==2.7.1 \
+ --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
+ --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \
+ # via -r requirements.in, fluent.pygments, sphinx
+pyparsing==2.4.7 \
+ --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
+ --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
+ # via -r requirements.in, packaging
+pyrsistent==0.17.3 \
+ --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e \
+ # via jsonschema
+python-dateutil==2.8.1 \
+ --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \
+ --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \
+ # via -r requirements.in, botocore
+python-frontmatter==0.5.0 \
+ --hash=sha256:a7dcdfdaf498d488dce98bfa9452f8b70f803a923760ceab1ebd99291d98d28a \
+ --hash=sha256:a9c2e90fc38e9f0c68d8b82299040f331ca3b8525ac7fa5f6beffef52b26c426 \
+ # via -r requirements.in
+pytz==2020.1 \
+ --hash=sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed \
+ --hash=sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048 \
+ # via -r requirements.in, babel
+pyyaml==5.3.1 \
+ --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \
+ --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \
+ --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \
+ --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \
+ --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \
+ --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \
+ --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \
+ --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \
+ --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \
+ --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \
+ --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \
+ # via -r requirements.in, python-frontmatter
+recommonmark==0.7.1 \
+ --hash=sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f \
+ --hash=sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67
+redo==2.0.3 \
+ --hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \
+ --hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8 \
+ # via -r requirements.in
+requests==2.24.0 \
+ --hash=sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b \
+ --hash=sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898 \
+ # via -r requirements.in, sphinx
+rsa==4.6 \
+ --hash=sha256:109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa \
+ --hash=sha256:6166864e23d6b5195a5cfed6cd9fed0fe774e226d8f854fcb23b7bbef0350233 \
+ # via -r requirements.in
+s3transfer==0.3.3 \
+ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \
+ --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \
+ # via -r requirements.in, boto3
+scandir==1.10.0 \
+ --hash=sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e \
+ --hash=sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022 \
+ --hash=sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f \
+ --hash=sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f \
+ --hash=sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae \
+ --hash=sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173 \
+ --hash=sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4 \
+ --hash=sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32 \
+ --hash=sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188 \
+ --hash=sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d \
+ --hash=sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac \
+ # via -r requirements.in
+sentry-sdk==0.14.3 \
+ --hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f \
+ --hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950 \
+ # via -r requirements.in
+singledispatch==3.4.0.3 \
+ --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
+ --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
+ # via -r requirements.in
+six==1.15.0 \
+ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
+ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \
+ # via -r requirements.in, fluent.pygments, jsonschema, livereload, packaging, parsimonious, python-dateutil, python-frontmatter, singledispatch
+snowballstemmer==2.0.0 \
+ --hash=sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0 \
+ --hash=sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52 \
+ # via -r requirements.in, sphinx
+sphinx-copybutton==0.3.1 \
+ --hash=sha256:0e0461df394515284e3907e3f418a0c60ef6ab6c9a27a800c8552772d0a402a2 \
+ --hash=sha256:5125c718e763596e6e52d92e15ee0d6f4800ad3817939be6dee51218870b3e3d
+sphinx-js==3.0.1 \
+ --hash=sha256:00d8d8bb4fe0e4c8cf940c7412af257c7fb83eaedc69d79765f9a012d719ce20 \
+ --hash=sha256:6dd8e79c5ad09fef389af5021d7a672066f6e12a51130497f6c611544d62734b \
+ # via -r requirements.in
+sphinx-markdown-tables==0.0.15 \
+ --hash=sha256:24a37662d86ee8bceb7d4f7003df0b25bc52401369d1ddc40d13ae7b58697031 \
+ --hash=sha256:c5f423a018aed9447aad1fbbada32c21c7565183aaf2da3c7fdb7bf4dffa3a0b \
+ # via -r requirements.in
+sphinx-rtd-theme==0.5.1 \
+ --hash=sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5 \
+ --hash=sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113
+Sphinx==3.4.2 \
+ --hash=sha256:77dec5ac77ca46eee54f59cf477780f4fb23327b3339ef39c8471abb829c1285 \
+ --hash=sha256:b8aa4eb5502c53d3b5ca13a07abeedacd887f7770c198952fd5b9530d973e767
+sphinxcontrib-applehelp==1.0.2 \
+ --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \
+ --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 \
+ # via -r requirements.in, sphinx
+sphinxcontrib-devhelp==1.0.2 \
+ --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \
+ --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 \
+ # via -r requirements.in, sphinx
+sphinxcontrib-htmlhelp==1.0.3 \
+ --hash=sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f \
+ --hash=sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b \
+ # via -r requirements.in, sphinx
+sphinxcontrib-jsmath==1.0.1 \
+ --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \
+ --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 \
+ # via -r requirements.in, sphinx
+sphinxcontrib-mermaid==0.5.0 \
+ --hash=sha256:80a4754d71c3f66a5b4ccb320868eb12f5eb0f2d25102e77886ce7ce327ec827 \
+ # via -r requirements.in
+sphinxcontrib-qthelp==1.0.3 \
+ --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \
+ --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 \
+ # via -r requirements.in, sphinx
+sphinxcontrib-serializinghtml==1.1.4 \
+ --hash=sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc \
+ --hash=sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a \
+ # via -r requirements.in, sphinx
+tornado==6.0.4 \
+ --hash=sha256:0fe2d45ba43b00a41cd73f8be321a44936dc1aba233dee979f17a042b83eb6dc \
+ --hash=sha256:22aed82c2ea340c3771e3babc5ef220272f6fd06b5108a53b4976d0d722bcd52 \
+ --hash=sha256:2c027eb2a393d964b22b5c154d1a23a5f8727db6fda837118a776b29e2b8ebc6 \
+ --hash=sha256:5217e601700f24e966ddab689f90b7ea4bd91ff3357c3600fa1045e26d68e55d \
+ --hash=sha256:5618f72e947533832cbc3dec54e1dffc1747a5cb17d1fd91577ed14fa0dc081b \
+ --hash=sha256:5f6a07e62e799be5d2330e68d808c8ac41d4a259b9cea61da4101b83cb5dc673 \
+ --hash=sha256:c58d56003daf1b616336781b26d184023ea4af13ae143d9dda65e31e534940b9 \
+ --hash=sha256:c952975c8ba74f546ae6de2e226ab3cc3cc11ae47baf607459a6728585bb542a \
+ --hash=sha256:c98232a3ac391f5faea6821b53db8db461157baa788f5d6222a193e9456e1740 \
+ # via -r requirements.in, livereload
+urllib3==1.25.10 \
+ --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \
+ --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 \
+ # via -r requirements.in, botocore, requests, sentry-sdk
+zipp==3.2.0 \
+ --hash=sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6 \
+ --hash=sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f \
+ # via -r requirements.in, importlib-metadata
+
+# WARNING: The following packages were not pinned, but pip requires them to be
+# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
+# setuptools
diff --git a/tools/moztreedocs/upload.py b/tools/moztreedocs/upload.py
new file mode 100644
index 0000000000..ebc8715cda
--- /dev/null
+++ b/tools/moztreedocs/upload.py
@@ -0,0 +1,175 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals, print_function
+
+import io
+import mimetypes
+import os
+import sys
+
+import botocore
+import boto3
+import concurrent.futures as futures
+import requests
+from pprint import pprint
+
+from mozbuild.util import memoize
+
+
+@memoize
+def create_aws_session():
+ """
+    Create an AWS session that is shared by both upload and delete.
+ """
+ region = "us-west-2"
+ level = os.environ.get("MOZ_SCM_LEVEL", "1")
+ bucket = {
+ "1": "gecko-docs.mozilla.org-l1",
+ "2": "gecko-docs.mozilla.org-l2",
+ "3": "gecko-docs.mozilla.org",
+ }[level]
+ secrets_url = "http://taskcluster/secrets/v1/secret/"
+ secrets_url += "project/releng/gecko/build/level-{}/gecko-docs-upload".format(level)
+
+ # Get the credentials from the TC secrets service. Note that these
+ # differ per SCM level
+ if "TASK_ID" in os.environ:
+ print("Using AWS credentials from the secrets service")
+ session = requests.Session()
+ res = session.get(secrets_url)
+ res.raise_for_status()
+ secret = res.json()["secret"]
+ session = boto3.session.Session(
+ aws_access_key_id=secret["AWS_ACCESS_KEY_ID"],
+ aws_secret_access_key=secret["AWS_SECRET_ACCESS_KEY"],
+ region_name=region,
+ )
+ else:
+ print("Trying to use your AWS credentials..")
+ session = boto3.session.Session(region_name=region)
+
+ s3 = session.client("s3", config=botocore.client.Config(max_pool_connections=20))
+
+ return s3, bucket
+
+
+@memoize
+def get_s3_keys(s3, bucket):
+ kwargs = {"Bucket": bucket}
+ all_keys = []
+ while True:
+ response = s3.list_objects_v2(**kwargs)
+ for obj in response["Contents"]:
+ all_keys.append(obj["Key"])
+
+ try:
+ kwargs["ContinuationToken"] = response["NextContinuationToken"]
+ except KeyError:
+ break
+
+ return all_keys
+
+
+def s3_set_redirects(redirects):
+
+ s3, bucket = create_aws_session()
+
+ configuration = {"IndexDocument": {"Suffix": "index.html"}, "RoutingRules": []}
+
+ for path, redirect in redirects.items():
+ rule = {
+ "Condition": {"KeyPrefixEquals": path},
+ "Redirect": {"ReplaceKeyPrefixWith": redirect},
+ }
+ if os.environ.get("MOZ_SCM_LEVEL") == "3":
+ rule["Redirect"]["HostName"] = "firefox-source-docs.mozilla.org"
+
+ configuration["RoutingRules"].append(rule)
+
+ s3.put_bucket_website(
+ Bucket=bucket,
+ WebsiteConfiguration=configuration,
+ )
+
+
+def s3_delete_missing(files, key_prefix=None):
+ """Delete files in the S3 bucket.
+
+ Delete files on the S3 bucket that doesn't match the files
+ given as the param. If the key_prefix is not specified, missing
+ files that has main/ as a prefix will be removed. Otherwise, it
+ will remove files with the same prefix as key_prefix.
+ """
+ s3, bucket = create_aws_session()
+ files_on_server = get_s3_keys(s3, bucket)
+ if key_prefix:
+ files_on_server = [
+ path for path in files_on_server if path.startswith(key_prefix)
+ ]
+ else:
+ files_on_server = [
+ path for path in files_on_server if not path.startswith("main/")
+ ]
+ files = [key_prefix + "/" + path if key_prefix else path for path, f in files]
+ files_to_delete = [path for path in files_on_server if path not in files]
+
+ query_size = 1000
+ while files_to_delete:
+ keys_to_remove = [{"Key": key} for key in files_to_delete[:query_size]]
+ response = s3.delete_objects(
+ Bucket=bucket,
+ Delete={
+ "Objects": keys_to_remove,
+ }, # NOQA
+ )
+ pprint(response, indent=2)
+ files_to_delete = files_to_delete[query_size:]
+
+
+def s3_upload(files, key_prefix=None):
+ """Upload files to an S3 bucket.
+
+ ``files`` is an iterable of ``(path, BaseFile)`` (typically from a
+ mozpack Finder).
+
+ Keys in the bucket correspond to source filenames. If ``key_prefix`` is
+ defined, key names will be ``<key_prefix>/<path>``.
+ """
+ s3, bucket = create_aws_session()
+
+ def upload(f, path, bucket, key, extra_args):
+ # Need to flush to avoid buffering/interleaving from multiple threads.
+ sys.stdout.write("uploading %s to %s\n" % (path, key))
+ sys.stdout.flush()
+ s3.upload_fileobj(f, bucket, key, ExtraArgs=extra_args)
+
+ fs = []
+ with futures.ThreadPoolExecutor(20) as e:
+ for path, f in files:
+ content_type, content_encoding = mimetypes.guess_type(path)
+ extra_args = {}
+ if content_type:
+ if content_type.startswith("text/"):
+ content_type += '; charset="utf-8"'
+ extra_args["ContentType"] = content_type
+ if content_encoding:
+ extra_args["ContentEncoding"] = content_encoding
+
+ if key_prefix:
+ key = "%s/%s" % (key_prefix, path)
+ else:
+ key = path
+
+ # The file types returned by mozpack behave like file objects. But
+ # they don't accept an argument to read(). So we wrap in a BytesIO.
+ fs.append(
+ e.submit(upload, io.BytesIO(f.read()), path, bucket, key, extra_args)
+ )
+
+ s3_delete_missing(files, key_prefix)
+ # Need to do this to catch any exceptions.
+ for f in fs:
+ f.result()