author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
commit     fbaf0bb26397aa498eb9156f06d5a6fe34dd7dd8
tree       4c1ccaf5486d4f2009f9a338a98a83e886e29c97  /third_party/python
parent     Releasing progress-linux version 124.0.1-1~progress7.99u1.
Merging upstream version 125.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/python')
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/AUTHORS | 13
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/LICENSE | 19
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/METADATA | 82
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/RECORD | 33
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/WHEEL | 6
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/entry_points.txt | 20
-rw-r--r--  third_party/python/Mako/Mako-1.1.2.dist-info/top_level.txt | 1
-rw-r--r--  third_party/python/Mako/mako/__init__.py | 8
-rw-r--r--  third_party/python/Mako/mako/_ast_util.py | 716
-rw-r--r--  third_party/python/Mako/mako/ast.py | 205
-rw-r--r--  third_party/python/Mako/mako/cache.py | 240
-rw-r--r--  third_party/python/Mako/mako/cmd.py | 103
-rw-r--r--  third_party/python/Mako/mako/codegen.py | 1318
-rw-r--r--  third_party/python/Mako/mako/compat.py | 166
-rw-r--r--  third_party/python/Mako/mako/exceptions.py | 430
-rw-r--r--  third_party/python/Mako/mako/ext/__init__.py | 0
-rw-r--r--  third_party/python/Mako/mako/ext/autohandler.py | 70
-rw-r--r--  third_party/python/Mako/mako/ext/babelplugin.py | 58
-rw-r--r--  third_party/python/Mako/mako/ext/beaker_cache.py | 82
-rw-r--r--  third_party/python/Mako/mako/ext/extract.py | 125
-rw-r--r--  third_party/python/Mako/mako/ext/linguaplugin.py | 57
-rw-r--r--  third_party/python/Mako/mako/ext/preprocessors.py | 20
-rw-r--r--  third_party/python/Mako/mako/ext/pygmentplugin.py | 157
-rw-r--r--  third_party/python/Mako/mako/ext/turbogears.py | 61
-rw-r--r--  third_party/python/Mako/mako/filters.py | 219
-rw-r--r--  third_party/python/Mako/mako/lexer.py | 490
-rw-r--r--  third_party/python/Mako/mako/lookup.py | 372
-rw-r--r--  third_party/python/Mako/mako/parsetree.py | 665
-rw-r--r--  third_party/python/Mako/mako/pygen.py | 305
-rw-r--r--  third_party/python/Mako/mako/pyparser.py | 242
-rw-r--r--  third_party/python/Mako/mako/runtime.py | 970
-rw-r--r--  third_party/python/Mako/mako/template.py | 780
-rw-r--r--  third_party/python/Mako/mako/util.py | 400
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/AUTHORS.md (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/AUTHORS.md) | 0
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/LICENSE (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/LICENSE) | 0
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/METADATA (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/METADATA) | 19
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/RECORD (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/RECORD) | 36
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/WHEEL (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/WHEEL) | 0
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/entry_points.txt (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/entry_points.txt) | 0
-rw-r--r--  third_party/python/glean_parser/glean_parser-13.0.0.dist-info/top_level.txt (renamed from third_party/python/glean_parser/glean_parser-11.0.1.dist-info/top_level.txt) | 0
-rw-r--r--  third_party/python/glean_parser/glean_parser/go_server.py | 145
-rw-r--r--  third_party/python/glean_parser/glean_parser/metrics.py | 60
-rw-r--r--  third_party/python/glean_parser/glean_parser/pings.py | 4
-rw-r--r--  third_party/python/glean_parser/glean_parser/python_server.py | 130
-rw-r--r--  third_party/python/glean_parser/glean_parser/rust.py | 23
-rw-r--r--  third_party/python/glean_parser/glean_parser/schemas/metrics.2-0-0.schema.yaml | 13
-rw-r--r--  third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml | 15
-rw-r--r--  third_party/python/glean_parser/glean_parser/templates/go_server.jinja2 | 225
-rw-r--r--  third_party/python/glean_parser/glean_parser/templates/python_server.jinja2 | 194
-rw-r--r--  third_party/python/glean_parser/glean_parser/templates/rust.jinja2 | 49
-rw-r--r--  third_party/python/glean_parser/glean_parser/templates/swift.jinja2 | 1
-rw-r--r--  third_party/python/glean_parser/glean_parser/translate.py | 4
-rw-r--r--  third_party/python/glean_parser/glean_parser/util.py | 1
-rw-r--r--  third_party/python/poetry.lock | 28
-rw-r--r--  third_party/python/requirements.in | 3
-rw-r--r--  third_party/python/requirements.txt | 10
56 files changed, 9364 insertions, 29 deletions
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/AUTHORS b/third_party/python/Mako/Mako-1.1.2.dist-info/AUTHORS
new file mode 100644
index 0000000000..81d16dc1a9
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/AUTHORS
@@ -0,0 +1,13 @@
+Mako was created by Michael Bayer.
+
+Major contributing authors include:
+
+- Michael Bayer <mike_mp@zzzcomputing.com>
+- Geoffrey T. Dairiki <dairiki@dairiki.org>
+- Philip Jenvey <pjenvey@underboss.org>
+- David Peckam
+- Armin Ronacher
+- Ben Bangert <ben@groovie.org>
+- Ben Trofatter
+
+
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/LICENSE b/third_party/python/Mako/Mako-1.1.2.dist-info/LICENSE
new file mode 100644
index 0000000000..1f835e94a1
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/METADATA b/third_party/python/Mako/Mako-1.1.2.dist-info/METADATA
new file mode 100644
index 0000000000..fb64a7ae0c
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/METADATA
@@ -0,0 +1,82 @@
+Metadata-Version: 2.1
+Name: Mako
+Version: 1.1.2
+Summary: A super-fast templating language that borrows the best ideas from the existing templating languages.
+Home-page: https://www.makotemplates.org/
+Author: Mike Bayer
+Author-email: mike@zzzcomputing.com
+License: MIT
+Project-URL: Documentation, https://docs.makotemplates.org
+Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako
+Keywords: templates
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Requires-Dist: MarkupSafe (>=0.9.2)
+Provides-Extra: babel
+Requires-Dist: Babel ; extra == 'babel'
+Provides-Extra: lingua
+Requires-Dist: lingua ; extra == 'lingua'
+
+=========================
+Mako Templates for Python
+=========================
+
+Mako is a template library written in Python. It provides a familiar, non-XML
+syntax which compiles into Python modules for maximum performance. Mako's
+syntax and API borrows from the best ideas of many others, including Django
+templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded
+Python (i.e. Python Server Page) language, which refines the familiar ideas
+of componentized layout and inheritance to produce one of the most
+straightforward and flexible models available, while also maintaining close
+ties to Python calling and scoping semantics.
+
+Nutshell
+========
+
+::
+
+ <%inherit file="base.html"/>
+ <%
+ rows = [[v for v in range(0,10)] for row in range(0,10)]
+ %>
+ <table>
+ % for row in rows:
+ ${makerow(row)}
+ % endfor
+ </table>
+
+ <%def name="makerow(row)">
+ <tr>
+ % for name in row:
+ <td>${name}</td>\
+ % endfor
+ </tr>
+ </%def>
+
+Philosophy
+===========
+
+Python is a great scripting language. Don't reinvent the wheel...your templates can handle it !
+
+Documentation
+==============
+
+See documentation for Mako at https://docs.makotemplates.org/en/latest/
+
+License
+========
+
+Mako is licensed under an MIT-style license (see LICENSE).
+Other incorporated projects may be licensed under different licenses.
+All licenses allow for non-commercial and commercial use.
+
+
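The Nutshell template in the METADATA above is rendered through the Template API vendored by this commit (mako/template.py). As a minimal sketch, using an illustrative inline template string rather than the file-based example (not part of the commit)::

    from mako.template import Template

    # ${name} is substituted from the keyword arguments passed to render().
    print(Template("hello, ${name}!").render(name="world"))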
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/RECORD b/third_party/python/Mako/Mako-1.1.2.dist-info/RECORD
new file mode 100644
index 0000000000..25f3b652f9
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/RECORD
@@ -0,0 +1,33 @@
+mako/__init__.py,sha256=kukH4UZzZx0XHsTBrfQMJ2fxcncjreyZv2m6PzvPWAM,242
+mako/_ast_util.py,sha256=QKXZC0DbpYefKhTrQZjLgjcNXlTgY38sbB-vmBR2HpU,20414
+mako/ast.py,sha256=T5KnOwZewqAfULULLLWp6joGD-j14SiCtrH1-KGJCpQ,6789
+mako/cache.py,sha256=N1VoKHul8K7RUwsGwoUL-HMtylDvrL6iGWNh7_AI1dc,7736
+mako/cmd.py,sha256=HZxSUsAFVHVrcWvb43Nh_vdbrGeJLFNTR6ejyhdZ0dc,2859
+mako/codegen.py,sha256=DoxSM34-305v0E4Ox7Y31nsVtKAmCEbRVC3BmNFy_54,47892
+mako/compat.py,sha256=08w8lB0Z3QKQi9vd4n4xUtjG_A3wOrk3QdvxkHlribY,3848
+mako/exceptions.py,sha256=ogXjpZO1beh37cWWa0pm4IHVNKsuNIUnqOjWznEKMLQ,13110
+mako/filters.py,sha256=vzpdxOOXWco5_evH_6_9a8b92lHuDC7Sl3XZhFyIVV8,6063
+mako/lexer.py,sha256=pNKb5MVSzOdW0L2S97TYPFBATmHD_mo8Br9-5RSfIUM,16926
+mako/lookup.py,sha256=TQ-wx1DR8rj2HqsNJBsrS4ZqROwAeTRkw-LrTbSQxFc,12718
+mako/parsetree.py,sha256=epGi5wKtZA8LcpzdrEXl_jjPGPvuO-IjuDSAYoLAp4Y,19411
+mako/pygen.py,sha256=dKxVMCSPMaXbMTgQyd5_J7WvdzPpuUprufR4PS3cyqY,10073
+mako/pyparser.py,sha256=eU3-mgdrmj1cL9SgFxh1rvIFcio_6oJxoNJnyMuGiCI,7789
+mako/runtime.py,sha256=2fhZBgmnP3wrWlZAVd6PZCSeuuGVXVA8BmRdXs6VEDo,28040
+mako/template.py,sha256=hKYaXvRzqU7Map8wXaGTGXc8gPl8EDF4WqoNpIF-EqQ,26558
+mako/util.py,sha256=5DoK9dvPpzFK6ZnL3hhzMHQ0meanhXrH8aHoO8fbkCs,11038
+mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mako/ext/autohandler.py,sha256=FJs1cY6Vz_NePboCUr-3STZY38btxFRZsPhMNe6NSms,1885
+mako/ext/babelplugin.py,sha256=EquybfGr6ffla72QapzkwTNpEwi_P87f1s9C7xNFuJw,2138
+mako/ext/beaker_cache.py,sha256=oDN-vSLeKfnAJKlPgrKKuHI-g7zszwd2y1uApBoOkeM,2599
+mako/ext/extract.py,sha256=oBx6lQqLOtDMu8YpBYK_klCZvMuVvbAAA3I-WUyTPXo,4616
+mako/ext/linguaplugin.py,sha256=Z8bV4RHjDJhqMApINSadycM1Xj-B2vB1_i3YN3l2KSc,1954
+mako/ext/preprocessors.py,sha256=TfHmG6EgzYumbCiFU06IHXG_n5y2sA6RFtDBNJ613M8,576
+mako/ext/pygmentplugin.py,sha256=wYJixnCqHJ7zHPT6gB3tGUg-R6yctFNpEhNIKbHHl-E,4951
+mako/ext/turbogears.py,sha256=BcKxkPpkeawkFqj6zS5sUQYt4I6LafRDYMLIDOg0ZPY,2165
+Mako-1.1.2.dist-info/AUTHORS,sha256=Io2Vw70mjYS7yFcUuJxhIGiMUQt8FWJuxiiwyUW1WRg,282
+Mako-1.1.2.dist-info/LICENSE,sha256=R80NQbEJL5Fhz7Yp7RXlzqGFFEcQ_0YzpCge8Ij_Xec,1097
+Mako-1.1.2.dist-info/METADATA,sha256=fxw2oNdTkNQnafc1Enid-QapQv1OaYnqwtNDJoeihoo,2600
+Mako-1.1.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+Mako-1.1.2.dist-info/entry_points.txt,sha256=GSuruj6eMrGwr7dHBGOdDkmgTTUQXr5ZrQjMmkPclKA,603
+Mako-1.1.2.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5
+Mako-1.1.2.dist-info/RECORD,,
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/WHEEL b/third_party/python/Mako/Mako-1.1.2.dist-info/WHEEL
new file mode 100644
index 0000000000..8b701e93c2
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/entry_points.txt b/third_party/python/Mako/Mako-1.1.2.dist-info/entry_points.txt
new file mode 100644
index 0000000000..8e033c0040
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/entry_points.txt
@@ -0,0 +1,20 @@
+
+ [python.templating.engines]
+ mako = mako.ext.turbogears:TGPlugin
+
+ [pygments.lexers]
+ mako = mako.ext.pygmentplugin:MakoLexer
+ html+mako = mako.ext.pygmentplugin:MakoHtmlLexer
+ xml+mako = mako.ext.pygmentplugin:MakoXmlLexer
+ js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer
+ css+mako = mako.ext.pygmentplugin:MakoCssLexer
+
+ [babel.extractors]
+ mako = mako.ext.babelplugin:extract [babel]
+
+ [lingua.extractors]
+ mako = mako.ext.linguaplugin:LinguaMakoExtractor [lingua]
+
+ [console_scripts]
+ mako-render = mako.cmd:cmdline
+ 
\ No newline at end of file
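These entry points are how external tools discover Mako's integrations: Pygments lexers, Babel and Lingua message extractors, the TurboGears template-engine plugin, and the mako-render console script. As a sketch (assuming Pygments is installed), the lexer registered as html+mako can also be used directly::

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from mako.ext.pygmentplugin import MakoHtmlLexer

    # Highlight a small Mako/HTML snippet with the vendored lexer.
    print(highlight("<p>hello, ${name}</p>", MakoHtmlLexer(), HtmlFormatter()))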
diff --git a/third_party/python/Mako/Mako-1.1.2.dist-info/top_level.txt b/third_party/python/Mako/Mako-1.1.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..2951cdd49d
--- /dev/null
+++ b/third_party/python/Mako/Mako-1.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+mako
diff --git a/third_party/python/Mako/mako/__init__.py b/third_party/python/Mako/mako/__init__.py
new file mode 100644
index 0000000000..16d187c0ce
--- /dev/null
+++ b/third_party/python/Mako/mako/__init__.py
@@ -0,0 +1,8 @@
+# mako/__init__.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+__version__ = '1.1.2'
diff --git a/third_party/python/Mako/mako/_ast_util.py b/third_party/python/Mako/mako/_ast_util.py
new file mode 100644
index 0000000000..bdcdbf6913
--- /dev/null
+++ b/third_party/python/Mako/mako/_ast_util.py
@@ -0,0 +1,716 @@
+# mako/_ast_util.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+ ast
+ ~~~
+
+ This is a stripped down version of Armin Ronacher's ast module.
+
+ :copyright: Copyright 2008 by Armin Ronacher.
+ :license: Python License.
+"""
+
+
+from _ast import Add
+from _ast import And
+from _ast import AST
+from _ast import BitAnd
+from _ast import BitOr
+from _ast import BitXor
+from _ast import Div
+from _ast import Eq
+from _ast import FloorDiv
+from _ast import Gt
+from _ast import GtE
+from _ast import If
+from _ast import In
+from _ast import Invert
+from _ast import Is
+from _ast import IsNot
+from _ast import LShift
+from _ast import Lt
+from _ast import LtE
+from _ast import Mod
+from _ast import Mult
+from _ast import Name
+from _ast import Not
+from _ast import NotEq
+from _ast import NotIn
+from _ast import Or
+from _ast import PyCF_ONLY_AST
+from _ast import RShift
+from _ast import Sub
+from _ast import UAdd
+from _ast import USub
+
+from mako.compat import arg_stringname
+
+BOOLOP_SYMBOLS = {And: "and", Or: "or"}
+
+BINOP_SYMBOLS = {
+ Add: "+",
+ Sub: "-",
+ Mult: "*",
+ Div: "/",
+ FloorDiv: "//",
+ Mod: "%",
+ LShift: "<<",
+ RShift: ">>",
+ BitOr: "|",
+ BitAnd: "&",
+ BitXor: "^",
+}
+
+CMPOP_SYMBOLS = {
+ Eq: "==",
+ Gt: ">",
+ GtE: ">=",
+ In: "in",
+ Is: "is",
+ IsNot: "is not",
+ Lt: "<",
+ LtE: "<=",
+ NotEq: "!=",
+ NotIn: "not in",
+}
+
+UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
+
+ALL_SYMBOLS = {}
+ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
+ALL_SYMBOLS.update(BINOP_SYMBOLS)
+ALL_SYMBOLS.update(CMPOP_SYMBOLS)
+ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
+
+
+def parse(expr, filename="<unknown>", mode="exec"):
+ """Parse an expression into an AST node."""
+ return compile(expr, filename, mode, PyCF_ONLY_AST)
+
+
+def iter_fields(node):
+ """Iterate over all fields of a node, only yielding existing fields."""
+ # CPython 2.5 compat
+ if not hasattr(node, "_fields") or not node._fields:
+ return
+ for field in node._fields:
+ try:
+ yield field, getattr(node, field)
+ except AttributeError:
+ pass
+
+
+class NodeVisitor(object):
+
+ """
+ Walks the abstract syntax tree and call visitor functions for every node
+ found. The visitor functions may return values which will be forwarded
+ by the `visit` method.
+
+ Per default the visitor functions for the nodes are ``'visit_'`` +
+ class name of the node. So a `TryFinally` node visit function would
+ be `visit_TryFinally`. This behavior can be changed by overriding
+ the `get_visitor` function. If no visitor function exists for a node
+ (return value `None`) the `generic_visit` visitor is used instead.
+
+ Don't use the `NodeVisitor` if you want to apply changes to nodes during
+ traversing. For this a special visitor exists (`NodeTransformer`) that
+ allows modifications.
+ """
+
+ def get_visitor(self, node):
+ """
+ Return the visitor function for this node or `None` if no visitor
+ exists for this node. In that case the generic visit function is
+ used instead.
+ """
+ method = "visit_" + node.__class__.__name__
+ return getattr(self, method, None)
+
+ def visit(self, node):
+ """Visit a node."""
+ f = self.get_visitor(node)
+ if f is not None:
+ return f(node)
+ return self.generic_visit(node)
+
+ def generic_visit(self, node):
+ """Called if no explicit visitor function exists for a node."""
+ for field, value in iter_fields(node):
+ if isinstance(value, list):
+ for item in value:
+ if isinstance(item, AST):
+ self.visit(item)
+ elif isinstance(value, AST):
+ self.visit(value)
+
+
+class NodeTransformer(NodeVisitor):
+
+ """
+ Walks the abstract syntax tree and allows modifications of nodes.
+
+ The `NodeTransformer` will walk the AST and use the return value of the
+ visitor functions to replace or remove the old node. If the return
+ value of the visitor function is `None` the node will be removed
+ from the previous location otherwise it's replaced with the return
+ value. The return value may be the original node in which case no
+ replacement takes place.
+
+ Here an example transformer that rewrites all `foo` to `data['foo']`::
+
+ class RewriteName(NodeTransformer):
+
+ def visit_Name(self, node):
+ return copy_location(Subscript(
+ value=Name(id='data', ctx=Load()),
+ slice=Index(value=Str(s=node.id)),
+ ctx=node.ctx
+ ), node)
+
+ Keep in mind that if the node you're operating on has child nodes
+ you must either transform the child nodes yourself or call the generic
+ visit function for the node first.
+
+ Nodes that were part of a collection of statements (that applies to
+ all statement nodes) may also return a list of nodes rather than just
+ a single node.
+
+ Usually you use the transformer like this::
+
+ node = YourTransformer().visit(node)
+ """
+
+ def generic_visit(self, node):
+ for field, old_value in iter_fields(node):
+ old_value = getattr(node, field, None)
+ if isinstance(old_value, list):
+ new_values = []
+ for value in old_value:
+ if isinstance(value, AST):
+ value = self.visit(value)
+ if value is None:
+ continue
+ elif not isinstance(value, AST):
+ new_values.extend(value)
+ continue
+ new_values.append(value)
+ old_value[:] = new_values
+ elif isinstance(old_value, AST):
+ new_node = self.visit(old_value)
+ if new_node is None:
+ delattr(node, field)
+ else:
+ setattr(node, field, new_node)
+ return node
+
+
+class SourceGenerator(NodeVisitor):
+
+ """
+ This visitor is able to transform a well formed syntax tree into python
+ sourcecode. For more details have a look at the docstring of the
+ `node_to_source` function.
+ """
+
+ def __init__(self, indent_with):
+ self.result = []
+ self.indent_with = indent_with
+ self.indentation = 0
+ self.new_lines = 0
+
+ def write(self, x):
+ if self.new_lines:
+ if self.result:
+ self.result.append("\n" * self.new_lines)
+ self.result.append(self.indent_with * self.indentation)
+ self.new_lines = 0
+ self.result.append(x)
+
+ def newline(self, n=1):
+ self.new_lines = max(self.new_lines, n)
+
+ def body(self, statements):
+ self.new_line = True
+ self.indentation += 1
+ for stmt in statements:
+ self.visit(stmt)
+ self.indentation -= 1
+
+ def body_or_else(self, node):
+ self.body(node.body)
+ if node.orelse:
+ self.newline()
+ self.write("else:")
+ self.body(node.orelse)
+
+ def signature(self, node):
+ want_comma = []
+
+ def write_comma():
+ if want_comma:
+ self.write(", ")
+ else:
+ want_comma.append(True)
+
+ padding = [None] * (len(node.args) - len(node.defaults))
+ for arg, default in zip(node.args, padding + node.defaults):
+ write_comma()
+ self.visit(arg)
+ if default is not None:
+ self.write("=")
+ self.visit(default)
+ if node.vararg is not None:
+ write_comma()
+ self.write("*" + arg_stringname(node.vararg))
+ if node.kwarg is not None:
+ write_comma()
+ self.write("**" + arg_stringname(node.kwarg))
+
+ def decorators(self, node):
+ for decorator in node.decorator_list:
+ self.newline()
+ self.write("@")
+ self.visit(decorator)
+
+ # Statements
+
+ def visit_Assign(self, node):
+ self.newline()
+ for idx, target in enumerate(node.targets):
+ if idx:
+ self.write(", ")
+ self.visit(target)
+ self.write(" = ")
+ self.visit(node.value)
+
+ def visit_AugAssign(self, node):
+ self.newline()
+ self.visit(node.target)
+ self.write(BINOP_SYMBOLS[type(node.op)] + "=")
+ self.visit(node.value)
+
+ def visit_ImportFrom(self, node):
+ self.newline()
+ self.write("from %s%s import " % ("." * node.level, node.module))
+ for idx, item in enumerate(node.names):
+ if idx:
+ self.write(", ")
+ self.write(item)
+
+ def visit_Import(self, node):
+ self.newline()
+ for item in node.names:
+ self.write("import ")
+ self.visit(item)
+
+ def visit_Expr(self, node):
+ self.newline()
+ self.generic_visit(node)
+
+ def visit_FunctionDef(self, node):
+ self.newline(n=2)
+ self.decorators(node)
+ self.newline()
+ self.write("def %s(" % node.name)
+ self.signature(node.args)
+ self.write("):")
+ self.body(node.body)
+
+ def visit_ClassDef(self, node):
+ have_args = []
+
+ def paren_or_comma():
+ if have_args:
+ self.write(", ")
+ else:
+ have_args.append(True)
+ self.write("(")
+
+ self.newline(n=3)
+ self.decorators(node)
+ self.newline()
+ self.write("class %s" % node.name)
+ for base in node.bases:
+ paren_or_comma()
+ self.visit(base)
+ # XXX: the if here is used to keep this module compatible
+ # with python 2.6.
+ if hasattr(node, "keywords"):
+ for keyword in node.keywords:
+ paren_or_comma()
+ self.write(keyword.arg + "=")
+ self.visit(keyword.value)
+ if getattr(node, "starargs", None):
+ paren_or_comma()
+ self.write("*")
+ self.visit(node.starargs)
+ if getattr(node, "kwargs", None):
+ paren_or_comma()
+ self.write("**")
+ self.visit(node.kwargs)
+ self.write(have_args and "):" or ":")
+ self.body(node.body)
+
+ def visit_If(self, node):
+ self.newline()
+ self.write("if ")
+ self.visit(node.test)
+ self.write(":")
+ self.body(node.body)
+ while True:
+ else_ = node.orelse
+ if len(else_) == 1 and isinstance(else_[0], If):
+ node = else_[0]
+ self.newline()
+ self.write("elif ")
+ self.visit(node.test)
+ self.write(":")
+ self.body(node.body)
+ else:
+ self.newline()
+ self.write("else:")
+ self.body(else_)
+ break
+
+ def visit_For(self, node):
+ self.newline()
+ self.write("for ")
+ self.visit(node.target)
+ self.write(" in ")
+ self.visit(node.iter)
+ self.write(":")
+ self.body_or_else(node)
+
+ def visit_While(self, node):
+ self.newline()
+ self.write("while ")
+ self.visit(node.test)
+ self.write(":")
+ self.body_or_else(node)
+
+ def visit_With(self, node):
+ self.newline()
+ self.write("with ")
+ self.visit(node.context_expr)
+ if node.optional_vars is not None:
+ self.write(" as ")
+ self.visit(node.optional_vars)
+ self.write(":")
+ self.body(node.body)
+
+ def visit_Pass(self, node):
+ self.newline()
+ self.write("pass")
+
+ def visit_Print(self, node):
+ # XXX: python 2.6 only
+ self.newline()
+ self.write("print ")
+ want_comma = False
+ if node.dest is not None:
+ self.write(" >> ")
+ self.visit(node.dest)
+ want_comma = True
+ for value in node.values:
+ if want_comma:
+ self.write(", ")
+ self.visit(value)
+ want_comma = True
+ if not node.nl:
+ self.write(",")
+
+ def visit_Delete(self, node):
+ self.newline()
+ self.write("del ")
+ for idx, target in enumerate(node):
+ if idx:
+ self.write(", ")
+ self.visit(target)
+
+ def visit_TryExcept(self, node):
+ self.newline()
+ self.write("try:")
+ self.body(node.body)
+ for handler in node.handlers:
+ self.visit(handler)
+
+ def visit_TryFinally(self, node):
+ self.newline()
+ self.write("try:")
+ self.body(node.body)
+ self.newline()
+ self.write("finally:")
+ self.body(node.finalbody)
+
+ def visit_Global(self, node):
+ self.newline()
+ self.write("global " + ", ".join(node.names))
+
+ def visit_Nonlocal(self, node):
+ self.newline()
+ self.write("nonlocal " + ", ".join(node.names))
+
+ def visit_Return(self, node):
+ self.newline()
+ self.write("return ")
+ self.visit(node.value)
+
+ def visit_Break(self, node):
+ self.newline()
+ self.write("break")
+
+ def visit_Continue(self, node):
+ self.newline()
+ self.write("continue")
+
+ def visit_Raise(self, node):
+ # XXX: Python 2.6 / 3.0 compatibility
+ self.newline()
+ self.write("raise")
+ if hasattr(node, "exc") and node.exc is not None:
+ self.write(" ")
+ self.visit(node.exc)
+ if node.cause is not None:
+ self.write(" from ")
+ self.visit(node.cause)
+ elif hasattr(node, "type") and node.type is not None:
+ self.visit(node.type)
+ if node.inst is not None:
+ self.write(", ")
+ self.visit(node.inst)
+ if node.tback is not None:
+ self.write(", ")
+ self.visit(node.tback)
+
+ # Expressions
+
+ def visit_Attribute(self, node):
+ self.visit(node.value)
+ self.write("." + node.attr)
+
+ def visit_Call(self, node):
+ want_comma = []
+
+ def write_comma():
+ if want_comma:
+ self.write(", ")
+ else:
+ want_comma.append(True)
+
+ self.visit(node.func)
+ self.write("(")
+ for arg in node.args:
+ write_comma()
+ self.visit(arg)
+ for keyword in node.keywords:
+ write_comma()
+ self.write(keyword.arg + "=")
+ self.visit(keyword.value)
+ if getattr(node, "starargs", None):
+ write_comma()
+ self.write("*")
+ self.visit(node.starargs)
+ if getattr(node, "kwargs", None):
+ write_comma()
+ self.write("**")
+ self.visit(node.kwargs)
+ self.write(")")
+
+ def visit_Name(self, node):
+ self.write(node.id)
+
+ def visit_NameConstant(self, node):
+ self.write(str(node.value))
+
+ def visit_arg(self, node):
+ self.write(node.arg)
+
+ def visit_Str(self, node):
+ self.write(repr(node.s))
+
+ def visit_Bytes(self, node):
+ self.write(repr(node.s))
+
+ def visit_Num(self, node):
+ self.write(repr(node.n))
+
+ # newly needed in Python 3.8
+ def visit_Constant(self, node):
+ self.write(repr(node.value))
+
+ def visit_Tuple(self, node):
+ self.write("(")
+ idx = -1
+ for idx, item in enumerate(node.elts):
+ if idx:
+ self.write(", ")
+ self.visit(item)
+ self.write(idx and ")" or ",)")
+
+ def sequence_visit(left, right):
+ def visit(self, node):
+ self.write(left)
+ for idx, item in enumerate(node.elts):
+ if idx:
+ self.write(", ")
+ self.visit(item)
+ self.write(right)
+
+ return visit
+
+ visit_List = sequence_visit("[", "]")
+ visit_Set = sequence_visit("{", "}")
+ del sequence_visit
+
+ def visit_Dict(self, node):
+ self.write("{")
+ for idx, (key, value) in enumerate(zip(node.keys, node.values)):
+ if idx:
+ self.write(", ")
+ self.visit(key)
+ self.write(": ")
+ self.visit(value)
+ self.write("}")
+
+ def visit_BinOp(self, node):
+ self.write("(")
+ self.visit(node.left)
+ self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
+ self.visit(node.right)
+ self.write(")")
+
+ def visit_BoolOp(self, node):
+ self.write("(")
+ for idx, value in enumerate(node.values):
+ if idx:
+ self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
+ self.visit(value)
+ self.write(")")
+
+ def visit_Compare(self, node):
+ self.write("(")
+ self.visit(node.left)
+ for op, right in zip(node.ops, node.comparators):
+ self.write(" %s " % CMPOP_SYMBOLS[type(op)])
+ self.visit(right)
+ self.write(")")
+
+ def visit_UnaryOp(self, node):
+ self.write("(")
+ op = UNARYOP_SYMBOLS[type(node.op)]
+ self.write(op)
+ if op == "not":
+ self.write(" ")
+ self.visit(node.operand)
+ self.write(")")
+
+ def visit_Subscript(self, node):
+ self.visit(node.value)
+ self.write("[")
+ self.visit(node.slice)
+ self.write("]")
+
+ def visit_Slice(self, node):
+ if node.lower is not None:
+ self.visit(node.lower)
+ self.write(":")
+ if node.upper is not None:
+ self.visit(node.upper)
+ if node.step is not None:
+ self.write(":")
+ if not (isinstance(node.step, Name) and node.step.id == "None"):
+ self.visit(node.step)
+
+ def visit_ExtSlice(self, node):
+ for idx, item in node.dims:
+ if idx:
+ self.write(", ")
+ self.visit(item)
+
+ def visit_Yield(self, node):
+ self.write("yield ")
+ self.visit(node.value)
+
+ def visit_Lambda(self, node):
+ self.write("lambda ")
+ self.signature(node.args)
+ self.write(": ")
+ self.visit(node.body)
+
+ def visit_Ellipsis(self, node):
+ self.write("Ellipsis")
+
+ def generator_visit(left, right):
+ def visit(self, node):
+ self.write(left)
+ self.visit(node.elt)
+ for comprehension in node.generators:
+ self.visit(comprehension)
+ self.write(right)
+
+ return visit
+
+ visit_ListComp = generator_visit("[", "]")
+ visit_GeneratorExp = generator_visit("(", ")")
+ visit_SetComp = generator_visit("{", "}")
+ del generator_visit
+
+ def visit_DictComp(self, node):
+ self.write("{")
+ self.visit(node.key)
+ self.write(": ")
+ self.visit(node.value)
+ for comprehension in node.generators:
+ self.visit(comprehension)
+ self.write("}")
+
+ def visit_IfExp(self, node):
+ self.visit(node.body)
+ self.write(" if ")
+ self.visit(node.test)
+ self.write(" else ")
+ self.visit(node.orelse)
+
+ def visit_Starred(self, node):
+ self.write("*")
+ self.visit(node.value)
+
+ def visit_Repr(self, node):
+ # XXX: python 2.6 only
+ self.write("`")
+ self.visit(node.value)
+ self.write("`")
+
+ # Helper Nodes
+
+ def visit_alias(self, node):
+ self.write(node.name)
+ if node.asname is not None:
+ self.write(" as " + node.asname)
+
+ def visit_comprehension(self, node):
+ self.write(" for ")
+ self.visit(node.target)
+ self.write(" in ")
+ self.visit(node.iter)
+ if node.ifs:
+ for if_ in node.ifs:
+ self.write(" if ")
+ self.visit(if_)
+
+ def visit_excepthandler(self, node):
+ self.newline()
+ self.write("except")
+ if node.type is not None:
+ self.write(" ")
+ self.visit(node.type)
+ if node.name is not None:
+ self.write(" as ")
+ self.visit(node.name)
+ self.write(":")
+ self.body(node.body)
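_ast_util.py gives Mako's code generator a way to parse Python and to render AST nodes back into source text via SourceGenerator. A minimal sketch of that round trip (not part of the commit)::

    from mako._ast_util import SourceGenerator, parse

    # Parse a statement, then regenerate source; binary operations are
    # re-emitted with explicit parentheses.
    gen = SourceGenerator(indent_with="    ")
    gen.visit(parse("x = a + b"))
    print("".join(gen.result))  # x = (a + b)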
diff --git a/third_party/python/Mako/mako/ast.py b/third_party/python/Mako/mako/ast.py
new file mode 100644
index 0000000000..cfae28062c
--- /dev/null
+++ b/third_party/python/Mako/mako/ast.py
@@ -0,0 +1,205 @@
+# mako/ast.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""utilities for analyzing expressions and blocks of Python
+code, as well as generating Python from AST nodes"""
+
+import re
+
+from mako import compat
+from mako import exceptions
+from mako import pyparser
+
+
+class PythonCode(object):
+
+ """represents information about a string containing Python code"""
+
+ def __init__(self, code, **exception_kwargs):
+ self.code = code
+
+ # represents all identifiers which are assigned to at some point in
+ # the code
+ self.declared_identifiers = set()
+
+ # represents all identifiers which are referenced before their
+ # assignment, if any
+ self.undeclared_identifiers = set()
+
+ # note that an identifier can be in both the undeclared and declared
+ # lists.
+
+ # using AST to parse instead of using code.co_varnames,
+ # code.co_names has several advantages:
+ # - we can locate an identifier as "undeclared" even if
+ # its declared later in the same block of code
+ # - AST is less likely to break with version changes
+ # (for example, the behavior of co_names changed a little bit
+ # in python version 2.5)
+ if isinstance(code, compat.string_types):
+ expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
+ else:
+ expr = code
+
+ f = pyparser.FindIdentifiers(self, **exception_kwargs)
+ f.visit(expr)
+
+
+class ArgumentList(object):
+
+ """parses a fragment of code as a comma-separated list of expressions"""
+
+ def __init__(self, code, **exception_kwargs):
+ self.codeargs = []
+ self.args = []
+ self.declared_identifiers = set()
+ self.undeclared_identifiers = set()
+ if isinstance(code, compat.string_types):
+ if re.match(r"\S", code) and not re.match(r",\s*$", code):
+ # if theres text and no trailing comma, insure its parsed
+ # as a tuple by adding a trailing comma
+ code += ","
+ expr = pyparser.parse(code, "exec", **exception_kwargs)
+ else:
+ expr = code
+
+ f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
+ f.visit(expr)
+
+
+class PythonFragment(PythonCode):
+
+ """extends PythonCode to provide identifier lookups in partial control
+ statements
+
+ e.g.::
+
+ for x in 5:
+ elif y==9:
+ except (MyException, e):
+
+ """
+
+ def __init__(self, code, **exception_kwargs):
+ m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
+ if not m:
+ raise exceptions.CompileException(
+ "Fragment '%s' is not a partial control statement" % code,
+ **exception_kwargs
+ )
+ if m.group(3):
+ code = code[: m.start(3)]
+ (keyword, expr) = m.group(1, 2)
+ if keyword in ["for", "if", "while"]:
+ code = code + "pass"
+ elif keyword == "try":
+ code = code + "pass\nexcept:pass"
+ elif keyword == "elif" or keyword == "else":
+ code = "if False:pass\n" + code + "pass"
+ elif keyword == "except":
+ code = "try:pass\n" + code + "pass"
+ elif keyword == "with":
+ code = code + "pass"
+ else:
+ raise exceptions.CompileException(
+ "Unsupported control keyword: '%s'" % keyword,
+ **exception_kwargs
+ )
+ super(PythonFragment, self).__init__(code, **exception_kwargs)
+
+
+class FunctionDecl(object):
+
+ """function declaration"""
+
+ def __init__(self, code, allow_kwargs=True, **exception_kwargs):
+ self.code = code
+ expr = pyparser.parse(code, "exec", **exception_kwargs)
+
+ f = pyparser.ParseFunc(self, **exception_kwargs)
+ f.visit(expr)
+ if not hasattr(self, "funcname"):
+ raise exceptions.CompileException(
+ "Code '%s' is not a function declaration" % code,
+ **exception_kwargs
+ )
+ if not allow_kwargs and self.kwargs:
+ raise exceptions.CompileException(
+ "'**%s' keyword argument not allowed here"
+ % self.kwargnames[-1],
+ **exception_kwargs
+ )
+
+ def get_argument_expressions(self, as_call=False):
+ """Return the argument declarations of this FunctionDecl as a printable
+ list.
+
+ By default the return value is appropriate for writing in a ``def``;
+ set `as_call` to true to build arguments to be passed to the function
+ instead (assuming locals with the same names as the arguments exist).
+ """
+
+ namedecls = []
+
+ # Build in reverse order, since defaults and slurpy args come last
+ argnames = self.argnames[::-1]
+ kwargnames = self.kwargnames[::-1]
+ defaults = self.defaults[::-1]
+ kwdefaults = self.kwdefaults[::-1]
+
+ # Named arguments
+ if self.kwargs:
+ namedecls.append("**" + kwargnames.pop(0))
+
+ for name in kwargnames:
+ # Keyword-only arguments must always be used by name, so even if
+ # this is a call, print out `foo=foo`
+ if as_call:
+ namedecls.append("%s=%s" % (name, name))
+ elif kwdefaults:
+ default = kwdefaults.pop(0)
+ if default is None:
+ # The AST always gives kwargs a default, since you can do
+ # `def foo(*, a=1, b, c=3)`
+ namedecls.append(name)
+ else:
+ namedecls.append(
+ "%s=%s"
+ % (name, pyparser.ExpressionGenerator(default).value())
+ )
+ else:
+ namedecls.append(name)
+
+ # Positional arguments
+ if self.varargs:
+ namedecls.append("*" + argnames.pop(0))
+
+ for name in argnames:
+ if as_call or not defaults:
+ namedecls.append(name)
+ else:
+ default = defaults.pop(0)
+ namedecls.append(
+ "%s=%s"
+ % (name, pyparser.ExpressionGenerator(default).value())
+ )
+
+ namedecls.reverse()
+ return namedecls
+
+ @property
+ def allargnames(self):
+ return tuple(self.argnames) + tuple(self.kwargnames)
+
+
+class FunctionArgs(FunctionDecl):
+
+ """the argument portion of a function declaration"""
+
+ def __init__(self, code, **kwargs):
+ super(FunctionArgs, self).__init__(
+ "def ANON(%s):pass" % code, **kwargs
+ )
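mako/ast.py wraps the pyparser visitors so the compiler can see which names a code fragment assigns and which it only references. A sketch of that analysis, passing exception kwargs the same way codegen.py later in this diff does for template imports (the filename value here is illustrative)::

    from mako import ast

    code = ast.PythonCode(
        "x = y + 1", source="", lineno=0, pos=0, filename="<example>"
    )
    print(code.declared_identifiers)    # {'x'}
    print(code.undeclared_identifiers)  # {'y'}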
diff --git a/third_party/python/Mako/mako/cache.py b/third_party/python/Mako/mako/cache.py
new file mode 100644
index 0000000000..26aa93ee38
--- /dev/null
+++ b/third_party/python/Mako/mako/cache.py
@@ -0,0 +1,240 @@
+# mako/cache.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat
+from mako import util
+
+_cache_plugins = util.PluginLoader("mako.cache")
+
+register_plugin = _cache_plugins.register
+register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
+
+
+class Cache(object):
+
+ """Represents a data content cache made available to the module
+ space of a specific :class:`.Template` object.
+
+ .. versionadded:: 0.6
+ :class:`.Cache` by itself is mostly a
+ container for a :class:`.CacheImpl` object, which implements
+ a fixed API to provide caching services; specific subclasses exist to
+ implement different
+ caching strategies. Mako includes a backend that works with
+ the Beaker caching system. Beaker itself then supports
+ a number of backends (i.e. file, memory, memcached, etc.)
+
+ The construction of a :class:`.Cache` is part of the mechanics
+ of a :class:`.Template`, and programmatic access to this
+ cache is typically via the :attr:`.Template.cache` attribute.
+
+ """
+
+ impl = None
+ """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`.
+
+ This accessor allows a :class:`.CacheImpl` with additional
+ methods beyond that of :class:`.Cache` to be used programmatically.
+
+ """
+
+ id = None
+ """Return the 'id' that identifies this cache.
+
+ This is a value that should be globally unique to the
+ :class:`.Template` associated with this cache, and can
+ be used by a caching system to name a local container
+ for data specific to this template.
+
+ """
+
+ starttime = None
+ """Epochal time value for when the owning :class:`.Template` was
+ first compiled.
+
+ A cache implementation may wish to invalidate data earlier than
+ this timestamp; this has the effect of the cache for a specific
+ :class:`.Template` starting clean any time the :class:`.Template`
+ is recompiled, such as when the original template file changed on
+ the filesystem.
+
+ """
+
+ def __init__(self, template, *args):
+ # check for a stale template calling the
+ # constructor
+ if isinstance(template, compat.string_types) and args:
+ return
+ self.template = template
+ self.id = template.module.__name__
+ self.starttime = template.module._modified_time
+ self._def_regions = {}
+ self.impl = self._load_impl(self.template.cache_impl)
+
+ def _load_impl(self, name):
+ return _cache_plugins.load(name)(self)
+
+ def get_or_create(self, key, creation_function, **kw):
+ """Retrieve a value from the cache, using the given creation function
+ to generate a new value."""
+
+ return self._ctx_get_or_create(key, creation_function, None, **kw)
+
+ def _ctx_get_or_create(self, key, creation_function, context, **kw):
+ """Retrieve a value from the cache, using the given creation function
+ to generate a new value."""
+
+ if not self.template.cache_enabled:
+ return creation_function()
+
+ return self.impl.get_or_create(
+ key, creation_function, **self._get_cache_kw(kw, context)
+ )
+
+ def set(self, key, value, **kw):
+ r"""Place a value in the cache.
+
+ :param key: the value's key.
+ :param value: the value.
+ :param \**kw: cache configuration arguments.
+
+ """
+
+ self.impl.set(key, value, **self._get_cache_kw(kw, None))
+
+ put = set
+ """A synonym for :meth:`.Cache.set`.
+
+ This is here for backwards compatibility.
+
+ """
+
+ def get(self, key, **kw):
+ r"""Retrieve a value from the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments. The
+ backend is configured using these arguments upon first request.
+ Subsequent requests that use the same series of configuration
+ values will use that same backend.
+
+ """
+ return self.impl.get(key, **self._get_cache_kw(kw, None))
+
+ def invalidate(self, key, **kw):
+ r"""Invalidate a value in the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments. The
+ backend is configured using these arguments upon first request.
+ Subsequent requests that use the same series of configuration
+ values will use that same backend.
+
+ """
+ self.impl.invalidate(key, **self._get_cache_kw(kw, None))
+
+ def invalidate_body(self):
+ """Invalidate the cached content of the "body" method for this
+ template.
+
+ """
+ self.invalidate("render_body", __M_defname="render_body")
+
+ def invalidate_def(self, name):
+ """Invalidate the cached content of a particular ``<%def>`` within this
+ template.
+
+ """
+
+ self.invalidate("render_%s" % name, __M_defname="render_%s" % name)
+
+ def invalidate_closure(self, name):
+ """Invalidate a nested ``<%def>`` within this template.
+
+ Caching of nested defs is a blunt tool as there is no
+ management of scope -- nested defs that use cache tags
+ need to have names unique of all other nested defs in the
+ template, else their content will be overwritten by
+ each other.
+
+ """
+
+ self.invalidate(name, __M_defname=name)
+
+ def _get_cache_kw(self, kw, context):
+ defname = kw.pop("__M_defname", None)
+ if not defname:
+ tmpl_kw = self.template.cache_args.copy()
+ tmpl_kw.update(kw)
+ elif defname in self._def_regions:
+ tmpl_kw = self._def_regions[defname]
+ else:
+ tmpl_kw = self.template.cache_args.copy()
+ tmpl_kw.update(kw)
+ self._def_regions[defname] = tmpl_kw
+ if context and self.impl.pass_context:
+ tmpl_kw = tmpl_kw.copy()
+ tmpl_kw.setdefault("context", context)
+ return tmpl_kw
+
+
+class CacheImpl(object):
+
+ """Provide a cache implementation for use by :class:`.Cache`."""
+
+ def __init__(self, cache):
+ self.cache = cache
+
+ pass_context = False
+ """If ``True``, the :class:`.Context` will be passed to
+ :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``.
+ """
+
+ def get_or_create(self, key, creation_function, **kw):
+ r"""Retrieve a value from the cache, using the given creation function
+ to generate a new value.
+
+ This function *must* return a value, either from
+ the cache, or via the given creation function.
+ If the creation function is called, the newly
+ created value should be populated into the cache
+ under the given key before being returned.
+
+ :param key: the value's key.
+ :param creation_function: function that when called generates
+ a new value.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def set(self, key, value, **kw):
+ r"""Place a value in the cache.
+
+ :param key: the value's key.
+ :param value: the value.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def get(self, key, **kw):
+ r"""Retrieve a value from the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def invalidate(self, key, **kw):
+ r"""Invalidate a value in the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
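CacheImpl defines the four methods a caching backend must provide; the beaker registration near the top of cache.py is the only backend shipped here. A minimal sketch of a custom process-local backend (the class and module names are illustrative, not part of the commit)::

    from mako.cache import CacheImpl

    class DictCacheImpl(CacheImpl):
        # A plain dict is enough to satisfy the CacheImpl interface.
        def __init__(self, cache):
            super(DictCacheImpl, self).__init__(cache)
            self._data = {}

        def get_or_create(self, key, creation_function, **kw):
            if key not in self._data:
                self._data[key] = creation_function()
            return self._data[key]

        def set(self, key, value, **kw):
            self._data[key] = value

        def get(self, key, **kw):
            return self._data.get(key)

        def invalidate(self, key, **kw):
            self._data.pop(key, None)

    # To use it, the class must live in an importable module and be
    # registered like the beaker line above, e.g.
    #   register_plugin("dict", "my_module", "DictCacheImpl")
    # after which Template(..., cache_impl="dict") selects it.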
diff --git a/third_party/python/Mako/mako/cmd.py b/third_party/python/Mako/mako/cmd.py
new file mode 100644
index 0000000000..c0f2c754ec
--- /dev/null
+++ b/third_party/python/Mako/mako/cmd.py
@@ -0,0 +1,103 @@
+# mako/cmd.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from argparse import ArgumentParser
+import io
+from os.path import dirname
+from os.path import isfile
+import sys
+
+from mako import exceptions
+from mako.lookup import TemplateLookup
+from mako.template import Template
+
+
+def varsplit(var):
+ if "=" not in var:
+ return (var, "")
+ return var.split("=", 1)
+
+
+def _exit():
+ sys.stderr.write(exceptions.text_error_template().render())
+ sys.exit(1)
+
+
+def cmdline(argv=None):
+
+ parser = ArgumentParser()
+ parser.add_argument(
+ "--var",
+ default=[],
+ action="append",
+ help="variable (can be used multiple times, use name=value)",
+ )
+ parser.add_argument(
+ "--template-dir",
+ default=[],
+ action="append",
+ help="Directory to use for template lookup (multiple "
+ "directories may be provided). If not given then if the "
+ "template is read from stdin, the value defaults to be "
+ "the current directory, otherwise it defaults to be the "
+ "parent directory of the file provided.",
+ )
+ parser.add_argument(
+ "--output-encoding", default=None, help="force output encoding"
+ )
+ parser.add_argument(
+ "--output-file",
+ default=None,
+ help="Write to file upon successful render instead of stdout",
+ )
+ parser.add_argument("input", nargs="?", default="-")
+
+ options = parser.parse_args(argv)
+
+ output_encoding = options.output_encoding
+ output_file = options.output_file
+
+ if options.input == "-":
+ lookup_dirs = options.template_dir or ["."]
+ lookup = TemplateLookup(lookup_dirs)
+ try:
+ template = Template(
+ sys.stdin.read(),
+ lookup=lookup,
+ output_encoding=output_encoding,
+ )
+ except:
+ _exit()
+ else:
+ filename = options.input
+ if not isfile(filename):
+ raise SystemExit("error: can't find %s" % filename)
+ lookup_dirs = options.template_dir or [dirname(filename)]
+ lookup = TemplateLookup(lookup_dirs)
+ try:
+ template = Template(
+ filename=filename,
+ lookup=lookup,
+ output_encoding=output_encoding,
+ )
+ except:
+ _exit()
+
+ kw = dict([varsplit(var) for var in options.var])
+ try:
+ rendered = template.render(**kw)
+ except:
+ _exit()
+ else:
+ if output_file:
+ io.open(output_file, "wt", encoding=output_encoding).write(
+ rendered
+ )
+ else:
+ sys.stdout.write(rendered)
+
+
+if __name__ == "__main__":
+ cmdline()
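cmd.py is also exposed as the mako-render console script (see entry_points.txt above); it renders a template file, or stdin when the input argument is "-", with --var values passed into the render call. The same entry point can be driven from Python (the template filename here is hypothetical)::

    from mako.cmd import cmdline

    # Equivalent to the shell command: mako-render --var name=world page.txt
    cmdline(["--var", "name=world", "page.txt"])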
diff --git a/third_party/python/Mako/mako/codegen.py b/third_party/python/Mako/mako/codegen.py
new file mode 100644
index 0000000000..a9ae55b847
--- /dev/null
+++ b/third_party/python/Mako/mako/codegen.py
@@ -0,0 +1,1318 @@
+# mako/codegen.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides functionality for rendering a parsetree constructing into module
+source code."""
+
+import json
+import re
+import time
+
+from mako import ast
+from mako import compat
+from mako import exceptions
+from mako import filters
+from mako import parsetree
+from mako import util
+from mako.pygen import PythonPrinter
+
+
+MAGIC_NUMBER = 10
+
+# names which are hardwired into the
+# template and are not accessed via the
+# context itself
+TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"])
+RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED)
+
+
+def compile( # noqa
+ node,
+ uri,
+ filename=None,
+ default_filters=None,
+ buffer_filters=None,
+ imports=None,
+ future_imports=None,
+ source_encoding=None,
+ generate_magic_comment=True,
+ disable_unicode=False,
+ strict_undefined=False,
+ enable_loop=True,
+ reserved_names=frozenset(),
+):
+ """Generate module source code given a parsetree node,
+ uri, and optional source filename"""
+
+ # if on Py2K, push the "source_encoding" string to be
+ # a bytestring itself, as we will be embedding it into
+ # the generated source and we don't want to coerce the
+ # result into a unicode object, in "disable_unicode" mode
+ if not compat.py3k and isinstance(source_encoding, compat.text_type):
+ source_encoding = source_encoding.encode(source_encoding)
+
+ buf = util.FastEncodingBuffer()
+
+ printer = PythonPrinter(buf)
+ _GenerateRenderMethod(
+ printer,
+ _CompileContext(
+ uri,
+ filename,
+ default_filters,
+ buffer_filters,
+ imports,
+ future_imports,
+ source_encoding,
+ generate_magic_comment,
+ disable_unicode,
+ strict_undefined,
+ enable_loop,
+ reserved_names,
+ ),
+ node,
+ )
+ return buf.getvalue()
+
+
+class _CompileContext(object):
+ def __init__(
+ self,
+ uri,
+ filename,
+ default_filters,
+ buffer_filters,
+ imports,
+ future_imports,
+ source_encoding,
+ generate_magic_comment,
+ disable_unicode,
+ strict_undefined,
+ enable_loop,
+ reserved_names,
+ ):
+ self.uri = uri
+ self.filename = filename
+ self.default_filters = default_filters
+ self.buffer_filters = buffer_filters
+ self.imports = imports
+ self.future_imports = future_imports
+ self.source_encoding = source_encoding
+ self.generate_magic_comment = generate_magic_comment
+ self.disable_unicode = disable_unicode
+ self.strict_undefined = strict_undefined
+ self.enable_loop = enable_loop
+ self.reserved_names = reserved_names
+
+
+class _GenerateRenderMethod(object):
+
+ """A template visitor object which generates the
+ full module source for a template.
+
+ """
+
+ def __init__(self, printer, compiler, node):
+ self.printer = printer
+ self.compiler = compiler
+ self.node = node
+ self.identifier_stack = [None]
+ self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
+
+ if self.in_def:
+ name = "render_%s" % node.funcname
+ args = node.get_argument_expressions()
+ filtered = len(node.filter_args.args) > 0
+ buffered = eval(node.attributes.get("buffered", "False"))
+ cached = eval(node.attributes.get("cached", "False"))
+ defs = None
+ pagetag = None
+ if node.is_block and not node.is_anonymous:
+ args += ["**pageargs"]
+ else:
+ defs = self.write_toplevel()
+ pagetag = self.compiler.pagetag
+ name = "render_body"
+ if pagetag is not None:
+ args = pagetag.body_decl.get_argument_expressions()
+ if not pagetag.body_decl.kwargs:
+ args += ["**pageargs"]
+ cached = eval(pagetag.attributes.get("cached", "False"))
+ self.compiler.enable_loop = self.compiler.enable_loop or eval(
+ pagetag.attributes.get("enable_loop", "False")
+ )
+ else:
+ args = ["**pageargs"]
+ cached = False
+ buffered = filtered = False
+ if args is None:
+ args = ["context"]
+ else:
+ args = [a for a in ["context"] + args]
+
+ self.write_render_callable(
+ pagetag or node, name, args, buffered, filtered, cached
+ )
+
+ if defs is not None:
+ for node in defs:
+ _GenerateRenderMethod(printer, compiler, node)
+
+ if not self.in_def:
+ self.write_metadata_struct()
+
+ def write_metadata_struct(self):
+ self.printer.source_map[self.printer.lineno] = max(
+ self.printer.source_map
+ )
+ struct = {
+ "filename": self.compiler.filename,
+ "uri": self.compiler.uri,
+ "source_encoding": self.compiler.source_encoding,
+ "line_map": self.printer.source_map,
+ }
+ self.printer.writelines(
+ '"""',
+ "__M_BEGIN_METADATA",
+ json.dumps(struct),
+ "__M_END_METADATA\n" '"""',
+ )
+
+ @property
+ def identifiers(self):
+ return self.identifier_stack[-1]
+
+ def write_toplevel(self):
+ """Traverse a template structure for module-level directives and
+ generate the start of module-level code.
+
+ """
+ inherit = []
+ namespaces = {}
+ module_code = []
+
+ self.compiler.pagetag = None
+
+ class FindTopLevel(object):
+ def visitInheritTag(s, node):
+ inherit.append(node)
+
+ def visitNamespaceTag(s, node):
+ namespaces[node.name] = node
+
+ def visitPageTag(s, node):
+ self.compiler.pagetag = node
+
+ def visitCode(s, node):
+ if node.ismodule:
+ module_code.append(node)
+
+ f = FindTopLevel()
+ for n in self.node.nodes:
+ n.accept_visitor(f)
+
+ self.compiler.namespaces = namespaces
+
+ module_ident = set()
+ for n in module_code:
+ module_ident = module_ident.union(n.declared_identifiers())
+
+ module_identifiers = _Identifiers(self.compiler)
+ module_identifiers.declared = module_ident
+
+ # module-level names, python code
+ if (
+ self.compiler.generate_magic_comment
+ and self.compiler.source_encoding
+ ):
+ self.printer.writeline(
+ "# -*- coding:%s -*-" % self.compiler.source_encoding
+ )
+
+ if self.compiler.future_imports:
+ self.printer.writeline(
+ "from __future__ import %s"
+ % (", ".join(self.compiler.future_imports),)
+ )
+ self.printer.writeline("from mako import runtime, filters, cache")
+ self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
+ self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
+ self.printer.writeline("__M_dict_builtin = dict")
+ self.printer.writeline("__M_locals_builtin = locals")
+ self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
+ self.printer.writeline("_modified_time = %r" % time.time())
+ self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
+ self.printer.writeline(
+ "_template_filename = %r" % self.compiler.filename
+ )
+ self.printer.writeline("_template_uri = %r" % self.compiler.uri)
+ self.printer.writeline(
+ "_source_encoding = %r" % self.compiler.source_encoding
+ )
+ if self.compiler.imports:
+ buf = ""
+ for imp in self.compiler.imports:
+ buf += imp + "\n"
+ self.printer.writeline(imp)
+ impcode = ast.PythonCode(
+ buf,
+ source="",
+ lineno=0,
+ pos=0,
+ filename="template defined imports",
+ )
+ else:
+ impcode = None
+
+ main_identifiers = module_identifiers.branch(self.node)
+ mit = module_identifiers.topleveldefs
+ module_identifiers.topleveldefs = mit.union(
+ main_identifiers.topleveldefs
+ )
+ module_identifiers.declared.update(TOPLEVEL_DECLARED)
+ if impcode:
+ module_identifiers.declared.update(impcode.declared_identifiers)
+
+ self.compiler.identifiers = module_identifiers
+ self.printer.writeline(
+ "_exports = %r"
+ % [n.name for n in main_identifiers.topleveldefs.values()]
+ )
+ self.printer.write_blanks(2)
+
+ if len(module_code):
+ self.write_module_code(module_code)
+
+ if len(inherit):
+ self.write_namespaces(namespaces)
+ self.write_inherit(inherit[-1])
+ elif len(namespaces):
+ self.write_namespaces(namespaces)
+
+ return list(main_identifiers.topleveldefs.values())
+
+ def write_render_callable(
+ self, node, name, args, buffered, filtered, cached
+ ):
+ """write a top-level render callable.
+
+ this could be the main render() method or that of a top-level def."""
+
+ if self.in_def:
+ decorator = node.decorator
+ if decorator:
+ self.printer.writeline(
+ "@runtime._decorate_toplevel(%s)" % decorator
+ )
+
+ self.printer.start_source(node.lineno)
+ self.printer.writelines(
+ "def %s(%s):" % (name, ",".join(args)),
+ # push new frame, assign current frame to __M_caller
+ "__M_caller = context.caller_stack._push_frame()",
+ "try:",
+ )
+ if buffered or filtered or cached:
+ self.printer.writeline("context._push_buffer()")
+
+ self.identifier_stack.append(
+ self.compiler.identifiers.branch(self.node)
+ )
+ if (not self.in_def or self.node.is_block) and "**pageargs" in args:
+ self.identifier_stack[-1].argument_declared.add("pageargs")
+
+ if not self.in_def and (
+ len(self.identifiers.locally_assigned) > 0
+ or len(self.identifiers.argument_declared) > 0
+ ):
+ self.printer.writeline(
+ "__M_locals = __M_dict_builtin(%s)"
+ % ",".join(
+ [
+ "%s=%s" % (x, x)
+ for x in self.identifiers.argument_declared
+ ]
+ )
+ )
+
+ self.write_variable_declares(self.identifiers, toplevel=True)
+
+ for n in self.node.nodes:
+ n.accept_visitor(self)
+
+ self.write_def_finish(self.node, buffered, filtered, cached)
+ self.printer.writeline(None)
+ self.printer.write_blanks(2)
+ if cached:
+ self.write_cache_decorator(
+ node, name, args, buffered, self.identifiers, toplevel=True
+ )
+
+ def write_module_code(self, module_code):
+ """write module-level template code, i.e. that which
+ is enclosed in <%! %> tags in the template."""
+ for n in module_code:
+ self.printer.write_indented_block(n.text, starting_lineno=n.lineno)
+
+ def write_inherit(self, node):
+ """write the module-level inheritance-determination callable."""
+
+ self.printer.writelines(
+ "def _mako_inherit(template, context):",
+ "_mako_generate_namespaces(context)",
+ "return runtime._inherit_from(context, %s, _template_uri)"
+ % (node.parsed_attributes["file"]),
+ None,
+ )
+
+ def write_namespaces(self, namespaces):
+ """write the module-level namespace-generating callable."""
+ self.printer.writelines(
+ "def _mako_get_namespace(context, name):",
+ "try:",
+ "return context.namespaces[(__name__, name)]",
+ "except KeyError:",
+ "_mako_generate_namespaces(context)",
+ "return context.namespaces[(__name__, name)]",
+ None,
+ None,
+ )
+ self.printer.writeline("def _mako_generate_namespaces(context):")
+
+ for node in namespaces.values():
+ if "import" in node.attributes:
+ self.compiler.has_ns_imports = True
+ self.printer.start_source(node.lineno)
+ if len(node.nodes):
+ self.printer.writeline("def make_namespace():")
+ export = []
+ identifiers = self.compiler.identifiers.branch(node)
+ self.in_def = True
+
+ class NSDefVisitor(object):
+ def visitDefTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitBlockTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitDefOrBase(s, node):
+ if node.is_anonymous:
+ raise exceptions.CompileException(
+ "Can't put anonymous blocks inside "
+ "<%namespace>",
+ **node.exception_kwargs
+ )
+ self.write_inline_def(node, identifiers, nested=False)
+ export.append(node.funcname)
+
+ vis = NSDefVisitor()
+ for n in node.nodes:
+ n.accept_visitor(vis)
+ self.printer.writeline("return [%s]" % (",".join(export)))
+ self.printer.writeline(None)
+ self.in_def = False
+ callable_name = "make_namespace()"
+ else:
+ callable_name = "None"
+
+ if "file" in node.parsed_attributes:
+ self.printer.writeline(
+ "ns = runtime.TemplateNamespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " templateuri=%s, callables=%s, "
+ " calling_uri=_template_uri)"
+ % (
+ node.name,
+ node.parsed_attributes.get("file", "None"),
+ callable_name,
+ )
+ )
+ elif "module" in node.parsed_attributes:
+ self.printer.writeline(
+ "ns = runtime.ModuleNamespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " callables=%s, calling_uri=_template_uri,"
+ " module=%s)"
+ % (
+ node.name,
+ callable_name,
+ node.parsed_attributes.get("module", "None"),
+ )
+ )
+ else:
+ self.printer.writeline(
+ "ns = runtime.Namespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " callables=%s, calling_uri=_template_uri)"
+ % (node.name, callable_name)
+ )
+ if eval(node.attributes.get("inheritable", "False")):
+ self.printer.writeline("context['self'].%s = ns" % (node.name))
+
+ self.printer.writeline(
+ "context.namespaces[(__name__, %s)] = ns" % repr(node.name)
+ )
+ self.printer.write_blanks(1)
+ if not len(namespaces):
+ self.printer.writeline("pass")
+ self.printer.writeline(None)
+
+ def write_variable_declares(self, identifiers, toplevel=False, limit=None):
+ """write variable declarations at the top of a function.
+
+ the variable declarations are in the form of callable
+ definitions for defs and/or name lookup within the
+ function's context argument. the names declared are based
+ on the names that are referenced in the function body,
+ which don't otherwise have any explicit assignment
+ operation. names that are assigned within the body are
+ assumed to be locally-scoped variables and are not
+ separately declared.
+
+ for def callable definitions, if the def is a top-level
+ callable then a 'stub' callable is generated which wraps
+ the current Context into a closure. if the def is not
+ top-level, it is fully rendered as a local closure.
+
+ """
+
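+        # Illustrative sketch of the kind of preamble this emits for a
+        # render body ('ns', 'x' and 'foo' are hypothetical names; the
+        # actual lines depend on the identifiers found):
+        #
+        #     ns = _mako_get_namespace(context, 'ns')
+        #     x = context.get('x', UNDEFINED)
+        #     def foo():
+        #         return render_foo(context)
+        #     __M_writer = context.writer()
+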
+ # collection of all defs available to us in this scope
+ comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
+ to_write = set()
+
+ # write "context.get()" for all variables we are going to
+        # need that aren't in the namespace yet
+ to_write = to_write.union(identifiers.undeclared)
+
+ # write closure functions for closures that we define
+ # right here
+ to_write = to_write.union(
+ [c.funcname for c in identifiers.closuredefs.values()]
+ )
+
+ # remove identifiers that are declared in the argument
+ # signature of the callable
+ to_write = to_write.difference(identifiers.argument_declared)
+
+ # remove identifiers that we are going to assign to.
+ # in this way we mimic Python's behavior,
+ # i.e. assignment to a variable within a block
+ # means that variable is now a "locally declared" var,
+ # which cannot be referenced beforehand.
+ to_write = to_write.difference(identifiers.locally_declared)
+
+ if self.compiler.enable_loop:
+ has_loop = "loop" in to_write
+ to_write.discard("loop")
+ else:
+ has_loop = False
+
+        # if a limiting set was sent, constrain to those items in that list
+ # (this is used for the caching decorator)
+ if limit is not None:
+ to_write = to_write.intersection(limit)
+
+ if toplevel and getattr(self.compiler, "has_ns_imports", False):
+ self.printer.writeline("_import_ns = {}")
+ self.compiler.has_imports = True
+ for ident, ns in self.compiler.namespaces.items():
+ if "import" in ns.attributes:
+ self.printer.writeline(
+ "_mako_get_namespace(context, %r)."
+ "_populate(_import_ns, %r)"
+ % (
+ ident,
+ re.split(r"\s*,\s*", ns.attributes["import"]),
+ )
+ )
+
+ if has_loop:
+ self.printer.writeline("loop = __M_loop = runtime.LoopStack()")
+
+ for ident in to_write:
+ if ident in comp_idents:
+ comp = comp_idents[ident]
+ if comp.is_block:
+ if not comp.is_anonymous:
+ self.write_def_decl(comp, identifiers)
+ else:
+ self.write_inline_def(comp, identifiers, nested=True)
+ else:
+ if comp.is_root():
+ self.write_def_decl(comp, identifiers)
+ else:
+ self.write_inline_def(comp, identifiers, nested=True)
+
+ elif ident in self.compiler.namespaces:
+ self.printer.writeline(
+ "%s = _mako_get_namespace(context, %r)" % (ident, ident)
+ )
+ else:
+ if getattr(self.compiler, "has_ns_imports", False):
+ if self.compiler.strict_undefined:
+ self.printer.writelines(
+ "%s = _import_ns.get(%r, UNDEFINED)"
+ % (ident, ident),
+ "if %s is UNDEFINED:" % ident,
+ "try:",
+ "%s = context[%r]" % (ident, ident),
+ "except KeyError:",
+ "raise NameError(\"'%s' is not defined\")" % ident,
+ None,
+ None,
+ )
+ else:
+ self.printer.writeline(
+ "%s = _import_ns.get"
+ "(%r, context.get(%r, UNDEFINED))"
+ % (ident, ident, ident)
+ )
+ else:
+ if self.compiler.strict_undefined:
+ self.printer.writelines(
+ "try:",
+ "%s = context[%r]" % (ident, ident),
+ "except KeyError:",
+ "raise NameError(\"'%s' is not defined\")" % ident,
+ None,
+ )
+ else:
+ self.printer.writeline(
+ "%s = context.get(%r, UNDEFINED)" % (ident, ident)
+ )
+
+ self.printer.writeline("__M_writer = context.writer()")
+
+ def write_def_decl(self, node, identifiers):
+ """write a locally-available callable referencing a top-level def"""
+ funcname = node.funcname
+ namedecls = node.get_argument_expressions()
+ nameargs = node.get_argument_expressions(as_call=True)
+
+ if not self.in_def and (
+ len(self.identifiers.locally_assigned) > 0
+ or len(self.identifiers.argument_declared) > 0
+ ):
+ nameargs.insert(0, "context._locals(__M_locals)")
+ else:
+ nameargs.insert(0, "context")
+ self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
+ self.printer.writeline(
+ "return render_%s(%s)" % (funcname, ",".join(nameargs))
+ )
+ self.printer.writeline(None)
+
+ def write_inline_def(self, node, identifiers, nested):
+ """write a locally-available def callable inside an enclosing def."""
+
+ namedecls = node.get_argument_expressions()
+
+ decorator = node.decorator
+ if decorator:
+ self.printer.writeline(
+ "@runtime._decorate_inline(context, %s)" % decorator
+ )
+ self.printer.writeline(
+ "def %s(%s):" % (node.funcname, ",".join(namedecls))
+ )
+ filtered = len(node.filter_args.args) > 0
+ buffered = eval(node.attributes.get("buffered", "False"))
+ cached = eval(node.attributes.get("cached", "False"))
+ self.printer.writelines(
+ # push new frame, assign current frame to __M_caller
+ "__M_caller = context.caller_stack._push_frame()",
+ "try:",
+ )
+ if buffered or filtered or cached:
+ self.printer.writelines("context._push_buffer()")
+
+ identifiers = identifiers.branch(node, nested=nested)
+
+ self.write_variable_declares(identifiers)
+
+ self.identifier_stack.append(identifiers)
+ for n in node.nodes:
+ n.accept_visitor(self)
+ self.identifier_stack.pop()
+
+ self.write_def_finish(node, buffered, filtered, cached)
+ self.printer.writeline(None)
+ if cached:
+ self.write_cache_decorator(
+ node,
+ node.funcname,
+ namedecls,
+ False,
+ identifiers,
+ inline=True,
+ toplevel=False,
+ )
+
+ def write_def_finish(
+ self, node, buffered, filtered, cached, callstack=True
+ ):
+ """write the end section of a rendering function, either outermost or
+ inline.
+
+ this takes into account if the rendering function was filtered,
+ buffered, etc. and closes the corresponding try: block if any, and
+ writes code to retrieve captured content, apply filters, send proper
+ return value."""
+
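+        # Roughly, for a buffered def the emitted epilogue looks like this
+        # (illustrative; 'some_filter' stands in for whatever buffer filter
+        # chain applies):
+        #
+        #     finally:
+        #         __M_buf = context._pop_buffer()
+        #         context.caller_stack._pop_frame()
+        #     return some_filter(__M_buf.getvalue())
+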
+ if not buffered and not cached and not filtered:
+ self.printer.writeline("return ''")
+ if callstack:
+ self.printer.writelines(
+ "finally:", "context.caller_stack._pop_frame()", None
+ )
+
+ if buffered or filtered or cached:
+ if buffered or cached:
+ # in a caching scenario, don't try to get a writer
+ # from the context after popping; assume the caching
+                # implementation might be using a context with no
+ # extra buffers
+ self.printer.writelines(
+ "finally:", "__M_buf = context._pop_buffer()"
+ )
+ else:
+ self.printer.writelines(
+ "finally:",
+ "__M_buf, __M_writer = context._pop_buffer_and_writer()",
+ )
+
+ if callstack:
+ self.printer.writeline("context.caller_stack._pop_frame()")
+
+ s = "__M_buf.getvalue()"
+ if filtered:
+ s = self.create_filter_callable(
+ node.filter_args.args, s, False
+ )
+ self.printer.writeline(None)
+ if buffered and not cached:
+ s = self.create_filter_callable(
+ self.compiler.buffer_filters, s, False
+ )
+ if buffered or cached:
+ self.printer.writeline("return %s" % s)
+ else:
+ self.printer.writelines("__M_writer(%s)" % s, "return ''")
+
+ def write_cache_decorator(
+ self,
+ node_or_pagetag,
+ name,
+ args,
+ buffered,
+ identifiers,
+ inline=False,
+ toplevel=False,
+ ):
+ """write a post-function decorator to replace a rendering
+ callable with a cached version of itself."""
+
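+        # The generated wrapper has roughly this shape (illustrative, for a
+        # hypothetical def named 'foo' with no extra cache arguments):
+        #
+        #     __M_foo = foo
+        #     def foo(**pageargs):
+        #         __M_writer(context.get('local').cache._ctx_get_or_create(
+        #             'foo', lambda: __M_foo(**pageargs), context,
+        #             __M_defname='foo'))
+        #         return ''
+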
+ self.printer.writeline("__M_%s = %s" % (name, name))
+ cachekey = node_or_pagetag.parsed_attributes.get(
+ "cache_key", repr(name)
+ )
+
+ cache_args = {}
+ if self.compiler.pagetag is not None:
+ cache_args.update(
+ (pa[6:], self.compiler.pagetag.parsed_attributes[pa])
+ for pa in self.compiler.pagetag.parsed_attributes
+ if pa.startswith("cache_") and pa != "cache_key"
+ )
+ cache_args.update(
+ (pa[6:], node_or_pagetag.parsed_attributes[pa])
+ for pa in node_or_pagetag.parsed_attributes
+ if pa.startswith("cache_") and pa != "cache_key"
+ )
+ if "timeout" in cache_args:
+ cache_args["timeout"] = int(eval(cache_args["timeout"]))
+
+ self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
+
+ # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
+ pass_args = [
+ "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
+ ]
+
+ self.write_variable_declares(
+ identifiers,
+ toplevel=toplevel,
+ limit=node_or_pagetag.undeclared_identifiers(),
+ )
+ if buffered:
+ s = (
+ "context.get('local')."
+ "cache._ctx_get_or_create("
+ "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
+ % (
+ cachekey,
+ name,
+ ",".join(pass_args),
+ "".join(
+ ["%s=%s, " % (k, v) for k, v in cache_args.items()]
+ ),
+ name,
+ )
+ )
+ # apply buffer_filters
+ s = self.create_filter_callable(
+ self.compiler.buffer_filters, s, False
+ )
+ self.printer.writelines("return " + s, None)
+ else:
+ self.printer.writelines(
+ "__M_writer(context.get('local')."
+ "cache._ctx_get_or_create("
+ "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
+ % (
+ cachekey,
+ name,
+ ",".join(pass_args),
+ "".join(
+ ["%s=%s, " % (k, v) for k, v in cache_args.items()]
+ ),
+ name,
+ ),
+ "return ''",
+ None,
+ )
+
+ def create_filter_callable(self, args, target, is_expression):
+ """write a filter-applying expression based on the filters
+ present in the given filter names, adjusting for the global
+ 'default' filter aliases as needed."""
+
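+        # Small illustrative example: with args ['myfilter', 'h'] (where
+        # 'myfilter' is a hypothetical template-defined filter) and target
+        # 'message', the calls nest left to right, yielding roughly
+        # filters.html_escape(myfilter(message)); 'h' is resolved through
+        # the default escape aliases via locate_encode() below.
+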
+ def locate_encode(name):
+ if re.match(r"decode\..+", name):
+ return "filters." + name
+ elif self.compiler.disable_unicode:
+ return filters.NON_UNICODE_ESCAPES.get(name, name)
+ else:
+ return filters.DEFAULT_ESCAPES.get(name, name)
+
+ if "n" not in args:
+ if is_expression:
+ if self.compiler.pagetag:
+ args = self.compiler.pagetag.filter_args.args + args
+ if self.compiler.default_filters and "n" not in args:
+ args = self.compiler.default_filters + args
+ for e in args:
+            # if the filter is given as a function call, get just the
+            # identifier portion
+ if e == "n":
+ continue
+ m = re.match(r"(.+?)(\(.*\))", e)
+ if m:
+ ident, fargs = m.group(1, 2)
+ f = locate_encode(ident)
+ e = f + fargs
+ else:
+ e = locate_encode(e)
+ assert e is not None
+ target = "%s(%s)" % (e, target)
+ return target
+
+ def visitExpression(self, node):
+ self.printer.start_source(node.lineno)
+ if (
+ len(node.escapes)
+ or (
+ self.compiler.pagetag is not None
+ and len(self.compiler.pagetag.filter_args.args)
+ )
+ or len(self.compiler.default_filters)
+ ):
+
+ s = self.create_filter_callable(
+ node.escapes_code.args, "%s" % node.text, True
+ )
+ self.printer.writeline("__M_writer(%s)" % s)
+ else:
+ self.printer.writeline("__M_writer(%s)" % node.text)
+
+ def visitControlLine(self, node):
+ if node.isend:
+ self.printer.writeline(None)
+ if node.has_loop_context:
+ self.printer.writeline("finally:")
+ self.printer.writeline("loop = __M_loop._exit()")
+ self.printer.writeline(None)
+ else:
+ self.printer.start_source(node.lineno)
+ if self.compiler.enable_loop and node.keyword == "for":
+ text = mangle_mako_loop(node, self.printer)
+ else:
+ text = node.text
+ self.printer.writeline(text)
+ children = node.get_children()
+ # this covers the three situations where we want to insert a pass:
+ # 1) a ternary control line with no children,
+ # 2) a primary control line with nothing but its own ternary
+ # and end control lines, and
+ # 3) any control line with no content other than comments
+ if not children or (
+ compat.all(
+ isinstance(c, (parsetree.Comment, parsetree.ControlLine))
+ for c in children
+ )
+ and compat.all(
+ (node.is_ternary(c.keyword) or c.isend)
+ for c in children
+ if isinstance(c, parsetree.ControlLine)
+ )
+ ):
+ self.printer.writeline("pass")
+
+ def visitText(self, node):
+ self.printer.start_source(node.lineno)
+ self.printer.writeline("__M_writer(%s)" % repr(node.content))
+
+ def visitTextTag(self, node):
+ filtered = len(node.filter_args.args) > 0
+ if filtered:
+ self.printer.writelines(
+ "__M_writer = context._push_writer()", "try:"
+ )
+ for n in node.nodes:
+ n.accept_visitor(self)
+ if filtered:
+ self.printer.writelines(
+ "finally:",
+ "__M_buf, __M_writer = context._pop_buffer_and_writer()",
+ "__M_writer(%s)"
+ % self.create_filter_callable(
+ node.filter_args.args, "__M_buf.getvalue()", False
+ ),
+ None,
+ )
+
+ def visitCode(self, node):
+ if not node.ismodule:
+ self.printer.write_indented_block(
+ node.text, starting_lineno=node.lineno
+ )
+
+ if not self.in_def and len(self.identifiers.locally_assigned) > 0:
+ # if we are the "template" def, fudge locally
+ # declared/modified variables into the "__M_locals" dictionary,
+ # which is used for def calls within the same template,
+ # to simulate "enclosing scope"
+ self.printer.writeline(
+ "__M_locals_builtin_stored = __M_locals_builtin()"
+ )
+ self.printer.writeline(
+ "__M_locals.update(__M_dict_builtin([(__M_key,"
+ " __M_locals_builtin_stored[__M_key]) for __M_key in"
+ " [%s] if __M_key in __M_locals_builtin_stored]))"
+ % ",".join([repr(x) for x in node.declared_identifiers()])
+ )
+
+ def visitIncludeTag(self, node):
+ self.printer.start_source(node.lineno)
+ args = node.attributes.get("args")
+ if args:
+ self.printer.writeline(
+ "runtime._include_file(context, %s, _template_uri, %s)"
+ % (node.parsed_attributes["file"], args)
+ )
+ else:
+ self.printer.writeline(
+ "runtime._include_file(context, %s, _template_uri)"
+ % (node.parsed_attributes["file"])
+ )
+
+ def visitNamespaceTag(self, node):
+ pass
+
+ def visitDefTag(self, node):
+ pass
+
+ def visitBlockTag(self, node):
+ if node.is_anonymous:
+ self.printer.writeline("%s()" % node.funcname)
+ else:
+ nameargs = node.get_argument_expressions(as_call=True)
+ nameargs += ["**pageargs"]
+ self.printer.writeline(
+ "if 'parent' not in context._data or "
+ "not hasattr(context._data['parent'], '%s'):" % node.funcname
+ )
+ self.printer.writeline(
+ "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
+ )
+ self.printer.writeline("\n")
+
+ def visitCallNamespaceTag(self, node):
+ # TODO: we can put namespace-specific checks here, such
+ # as ensure the given namespace will be imported,
+ # pre-import the namespace, etc.
+ self.visitCallTag(node)
+
+ def visitCallTag(self, node):
+ self.printer.writeline("def ccall(caller):")
+ export = ["body"]
+ callable_identifiers = self.identifiers.branch(node, nested=True)
+ body_identifiers = callable_identifiers.branch(node, nested=False)
+ # we want the 'caller' passed to ccall to be used
+ # for the body() function, but for other non-body()
+ # <%def>s within <%call> we want the current caller
+ # off the call stack (if any)
+ body_identifiers.add_declared("caller")
+
+ self.identifier_stack.append(body_identifiers)
+
+ class DefVisitor(object):
+ def visitDefTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitBlockTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitDefOrBase(s, node):
+ self.write_inline_def(node, callable_identifiers, nested=False)
+ if not node.is_anonymous:
+ export.append(node.funcname)
+ # remove defs that are within the <%call> from the
+                # "closuredefs" defined in the body, so they don't render twice
+ if node.funcname in body_identifiers.closuredefs:
+ del body_identifiers.closuredefs[node.funcname]
+
+ vis = DefVisitor()
+ for n in node.nodes:
+ n.accept_visitor(vis)
+ self.identifier_stack.pop()
+
+ bodyargs = node.body_decl.get_argument_expressions()
+ self.printer.writeline("def body(%s):" % ",".join(bodyargs))
+
+ # TODO: figure out best way to specify
+ # buffering/nonbuffering (at call time would be better)
+ buffered = False
+ if buffered:
+ self.printer.writelines("context._push_buffer()", "try:")
+ self.write_variable_declares(body_identifiers)
+ self.identifier_stack.append(body_identifiers)
+
+ for n in node.nodes:
+ n.accept_visitor(self)
+ self.identifier_stack.pop()
+
+ self.write_def_finish(node, buffered, False, False, callstack=False)
+ self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
+
+ self.printer.writelines(
+ # push on caller for nested call
+ "context.caller_stack.nextcaller = "
+ "runtime.Namespace('caller', context, "
+ "callables=ccall(__M_caller))",
+ "try:",
+ )
+ self.printer.start_source(node.lineno)
+ self.printer.writelines(
+ "__M_writer(%s)"
+ % self.create_filter_callable([], node.expression, True),
+ "finally:",
+ "context.caller_stack.nextcaller = None",
+ None,
+ )
+
+
+class _Identifiers(object):
+
+ """tracks the status of identifier names as template code is rendered."""
+
+ def __init__(self, compiler, node=None, parent=None, nested=False):
+ if parent is not None:
+ # if we are the branch created in write_namespaces(),
+ # we don't share any context from the main body().
+ if isinstance(node, parsetree.NamespaceTag):
+ self.declared = set()
+ self.topleveldefs = util.SetLikeDict()
+ else:
+ # things that have already been declared
+ # in an enclosing namespace (i.e. names we can just use)
+ self.declared = (
+ set(parent.declared)
+ .union([c.name for c in parent.closuredefs.values()])
+ .union(parent.locally_declared)
+ .union(parent.argument_declared)
+ )
+
+ # if these identifiers correspond to a "nested"
+ # scope, it means whatever the parent identifiers
+ # had as undeclared will have been declared by that parent,
+ # and therefore we have them in our scope.
+ if nested:
+ self.declared = self.declared.union(parent.undeclared)
+
+ # top level defs that are available
+ self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
+ else:
+ self.declared = set()
+ self.topleveldefs = util.SetLikeDict()
+
+ self.compiler = compiler
+
+ # things within this level that are referenced before they
+ # are declared (e.g. assigned to)
+ self.undeclared = set()
+
+ # things that are declared locally. some of these things
+ # could be in the "undeclared" list as well if they are
+ # referenced before declared
+ self.locally_declared = set()
+
+ # assignments made in explicit python blocks.
+ # these will be propagated to
+ # the context of local def calls.
+ self.locally_assigned = set()
+
+ # things that are declared in the argument
+ # signature of the def callable
+ self.argument_declared = set()
+
+ # closure defs that are defined in this level
+ self.closuredefs = util.SetLikeDict()
+
+ self.node = node
+
+ if node is not None:
+ node.accept_visitor(self)
+
+ illegal_names = self.compiler.reserved_names.intersection(
+ self.locally_declared
+ )
+ if illegal_names:
+ raise exceptions.NameConflictError(
+ "Reserved words declared in template: %s"
+ % ", ".join(illegal_names)
+ )
+
+ def branch(self, node, **kwargs):
+ """create a new Identifiers for a new Node, with
+ this Identifiers as the parent."""
+
+ return _Identifiers(self.compiler, node, self, **kwargs)
+
+ @property
+ def defs(self):
+ return set(self.topleveldefs.union(self.closuredefs).values())
+
+ def __repr__(self):
+ return (
+ "Identifiers(declared=%r, locally_declared=%r, "
+ "undeclared=%r, topleveldefs=%r, closuredefs=%r, "
+ "argumentdeclared=%r)"
+ % (
+ list(self.declared),
+ list(self.locally_declared),
+ list(self.undeclared),
+ [c.name for c in self.topleveldefs.values()],
+ [c.name for c in self.closuredefs.values()],
+ self.argument_declared,
+ )
+ )
+
+ def check_declared(self, node):
+ """update the state of this Identifiers with the undeclared
+ and declared identifiers of the given node."""
+
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+ for ident in node.declared_identifiers():
+ self.locally_declared.add(ident)
+
+ def add_declared(self, ident):
+ self.declared.add(ident)
+ if ident in self.undeclared:
+ self.undeclared.remove(ident)
+
+ def visitExpression(self, node):
+ self.check_declared(node)
+
+ def visitControlLine(self, node):
+ self.check_declared(node)
+
+ def visitCode(self, node):
+ if not node.ismodule:
+ self.check_declared(node)
+ self.locally_assigned = self.locally_assigned.union(
+ node.declared_identifiers()
+ )
+
+ def visitNamespaceTag(self, node):
+ # only traverse into the sub-elements of a
+ # <%namespace> tag if we are the branch created in
+ # write_namespaces()
+ if self.node is node:
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def _check_name_exists(self, collection, node):
+ existing = collection.get(node.funcname)
+ collection[node.funcname] = node
+ if (
+ existing is not None
+ and existing is not node
+ and (node.is_block or existing.is_block)
+ ):
+ raise exceptions.CompileException(
+ "%%def or %%block named '%s' already "
+ "exists in this template." % node.funcname,
+ **node.exception_kwargs
+ )
+
+ def visitDefTag(self, node):
+ if node.is_root() and not node.is_anonymous:
+ self._check_name_exists(self.topleveldefs, node)
+ elif node is not self.node:
+ self._check_name_exists(self.closuredefs, node)
+
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+
+ # visit defs only one level deep
+ if node is self.node:
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def visitBlockTag(self, node):
+ if node is not self.node and not node.is_anonymous:
+
+ if isinstance(self.node, parsetree.DefTag):
+ raise exceptions.CompileException(
+ "Named block '%s' not allowed inside of def '%s'"
+ % (node.name, self.node.name),
+ **node.exception_kwargs
+ )
+ elif isinstance(
+ self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
+ ):
+ raise exceptions.CompileException(
+ "Named block '%s' not allowed inside of <%%call> tag"
+ % (node.name,),
+ **node.exception_kwargs
+ )
+
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+
+ if not node.is_anonymous:
+ self._check_name_exists(self.topleveldefs, node)
+ self.undeclared.add(node.funcname)
+ elif node is not self.node:
+ self._check_name_exists(self.closuredefs, node)
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def visitTextTag(self, node):
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+
+ def visitIncludeTag(self, node):
+ self.check_declared(node)
+
+ def visitPageTag(self, node):
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ self.check_declared(node)
+
+ def visitCallNamespaceTag(self, node):
+ self.visitCallTag(node)
+
+ def visitCallTag(self, node):
+ if node is self.node:
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ for n in node.nodes:
+ n.accept_visitor(self)
+ else:
+ for ident in node.undeclared_identifiers():
+ if ident != "context" and ident not in self.declared.union(
+ self.locally_declared
+ ):
+ self.undeclared.add(ident)
+
+
+_FOR_LOOP = re.compile(
+ r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
+ r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
+)
+
+
+def mangle_mako_loop(node, printer):
+ """converts a for loop into a context manager wrapped around a for loop
+ when access to the `loop` variable has been detected in the for loop body
+ """
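+    # For example (illustrative), given a control line
+    #
+    #     % for item in items:
+    #
+    # whose body references ``loop``, the printer receives
+    #
+    #     loop = __M_loop._enter(items)
+    #     try:
+    #
+    # and the text returned to the caller becomes "for item in loop:".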
+ loop_variable = LoopVariable()
+ node.accept_visitor(loop_variable)
+ if loop_variable.detected:
+ node.nodes[-1].has_loop_context = True
+ match = _FOR_LOOP.match(node.text)
+ if match:
+ printer.writelines(
+ "loop = __M_loop._enter(%s)" % match.group(2),
+ "try:"
+ # 'with __M_loop(%s) as loop:' % match.group(2)
+ )
+ text = "for %s in loop:" % match.group(1)
+ else:
+ raise SyntaxError("Couldn't apply loop context: %s" % node.text)
+ else:
+ text = node.text
+ return text
+
+
+class LoopVariable(object):
+
+ """A node visitor which looks for the name 'loop' within undeclared
+ identifiers."""
+
+ def __init__(self):
+ self.detected = False
+
+ def _loop_reference_detected(self, node):
+ if "loop" in node.undeclared_identifiers():
+ self.detected = True
+ else:
+ for n in node.get_children():
+ n.accept_visitor(self)
+
+ def visitControlLine(self, node):
+ self._loop_reference_detected(node)
+
+ def visitCode(self, node):
+ self._loop_reference_detected(node)
+
+ def visitExpression(self, node):
+ self._loop_reference_detected(node)
diff --git a/third_party/python/Mako/mako/compat.py b/third_party/python/Mako/mako/compat.py
new file mode 100644
index 0000000000..9aac98cb5b
--- /dev/null
+++ b/third_party/python/Mako/mako/compat.py
@@ -0,0 +1,166 @@
+# mako/compat.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import collections
+import inspect
+import sys
+
+py3k = sys.version_info >= (3, 0)
+py2k = sys.version_info < (3,)
+py27 = sys.version_info >= (2, 7)
+jython = sys.platform.startswith("java")
+win32 = sys.platform.startswith("win")
+pypy = hasattr(sys, "pypy_version_info")
+
+ArgSpec = collections.namedtuple(
+ "ArgSpec", ["args", "varargs", "keywords", "defaults"]
+)
+
+
+def inspect_getargspec(func):
+ """getargspec based on fully vendored getfullargspec from Python 3.3."""
+
+ if inspect.ismethod(func):
+ func = func.__func__
+ if not inspect.isfunction(func):
+ raise TypeError("{!r} is not a Python function".format(func))
+
+ co = func.__code__
+ if not inspect.iscode(co):
+ raise TypeError("{!r} is not a code object".format(co))
+
+ nargs = co.co_argcount
+ names = co.co_varnames
+ nkwargs = co.co_kwonlyargcount if py3k else 0
+ args = list(names[:nargs])
+
+ nargs += nkwargs
+ varargs = None
+ if co.co_flags & inspect.CO_VARARGS:
+ varargs = co.co_varnames[nargs]
+ nargs = nargs + 1
+ varkw = None
+ if co.co_flags & inspect.CO_VARKEYWORDS:
+ varkw = co.co_varnames[nargs]
+
+ return ArgSpec(args, varargs, varkw, func.__defaults__)
+
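+# Illustrative usage: the returned ArgSpec mirrors the shape of the old
+# inspect.getargspec() result, e.g.
+#
+#     spec = inspect_getargspec(lambda a, b=1, *args, **kw: None)
+#     # spec.args == ['a', 'b'], spec.varargs == 'args',
+#     # spec.keywords == 'kw', spec.defaults == (1,)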
+
+if py3k:
+ from io import StringIO
+ import builtins as compat_builtins
+ from urllib.parse import quote_plus, unquote_plus
+ from html.entities import codepoint2name, name2codepoint
+
+ string_types = (str,)
+ binary_type = bytes
+ text_type = str
+
+ from io import BytesIO as byte_buffer
+
+ def u(s):
+ return s
+
+ def b(s):
+ return s.encode("latin-1")
+
+ def octal(lit):
+ return eval("0o" + lit)
+
+
+else:
+ import __builtin__ as compat_builtins # noqa
+
+ try:
+ from cStringIO import StringIO
+ except:
+ from StringIO import StringIO
+
+ byte_buffer = StringIO
+
+ from urllib import quote_plus, unquote_plus # noqa
+ from htmlentitydefs import codepoint2name, name2codepoint # noqa
+
+ string_types = (basestring,) # noqa
+ binary_type = str
+ text_type = unicode # noqa
+
+ def u(s):
+ return unicode(s, "utf-8") # noqa
+
+ def b(s):
+ return s
+
+ def octal(lit):
+ return eval("0" + lit)
+
+
+if py3k:
+ from importlib import machinery
+
+ def load_module(module_id, path):
+ return machinery.SourceFileLoader(module_id, path).load_module()
+
+
+else:
+ import imp
+
+ def load_module(module_id, path):
+ fp = open(path, "rb")
+ try:
+ return imp.load_source(module_id, path, fp)
+ finally:
+ fp.close()
+
+
+if py3k:
+
+ def reraise(tp, value, tb=None, cause=None):
+ if cause is not None:
+ value.__cause__ = cause
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+else:
+ exec(
+ "def reraise(tp, value, tb=None, cause=None):\n"
+ " raise tp, value, tb\n"
+ )
+
+
+def exception_as():
+ return sys.exc_info()[1]
+
+
+all = all # noqa
+
+
+def exception_name(exc):
+ return exc.__class__.__name__
+
+
+################################################
+# cross-compatible metaclass implementation
+# Copyright (c) 2010-2012 Benjamin Peterson
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("%sBase" % meta.__name__, (base,), {})
+
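+# Illustrative usage ('SomeMeta' and 'Base' are hypothetical names):
+#
+#     class MyClass(with_metaclass(SomeMeta, Base)):
+#         pass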
+
+################################################
+
+
+def arg_stringname(func_arg):
+ """Gets the string name of a kwarg or vararg
+    In Python 3.4 a function's args are
+    of _ast.arg type, not _ast.name.
+ """
+ if hasattr(func_arg, "arg"):
+ return func_arg.arg
+ else:
+ return str(func_arg)
diff --git a/third_party/python/Mako/mako/exceptions.py b/third_party/python/Mako/mako/exceptions.py
new file mode 100644
index 0000000000..ea7b20dbdd
--- /dev/null
+++ b/third_party/python/Mako/mako/exceptions.py
@@ -0,0 +1,430 @@
+# mako/exceptions.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""exception classes"""
+
+import sys
+import traceback
+
+from mako import compat
+from mako import util
+
+
+class MakoException(Exception):
+ pass
+
+
+class RuntimeException(MakoException):
+ pass
+
+
+def _format_filepos(lineno, pos, filename):
+ if filename is None:
+ return " at line: %d char: %d" % (lineno, pos)
+ else:
+ return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
+
+
+class CompileException(MakoException):
+ def __init__(self, message, source, lineno, pos, filename):
+ MakoException.__init__(
+ self, message + _format_filepos(lineno, pos, filename)
+ )
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+ self.source = source
+
+
+class SyntaxException(MakoException):
+ def __init__(self, message, source, lineno, pos, filename):
+ MakoException.__init__(
+ self, message + _format_filepos(lineno, pos, filename)
+ )
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+ self.source = source
+
+
+class UnsupportedError(MakoException):
+
+ """raised when a retired feature is used."""
+
+
+class NameConflictError(MakoException):
+
+ """raised when a reserved word is used inappropriately"""
+
+
+class TemplateLookupException(MakoException):
+ pass
+
+
+class TopLevelLookupException(TemplateLookupException):
+ pass
+
+
+class RichTraceback(object):
+
+    """Pulls the current exception from the ``sys`` traceback and extracts
+ Mako-specific template information.
+
+ See the usage examples in :ref:`handling_exceptions`.
+
+ """
+
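+    # Illustrative usage, mirroring the pattern used by
+    # text_error_template() below ('template' stands for any Template
+    # being rendered); with no arguments the current sys.exc_info()
+    # is consulted:
+    #
+    #     try:
+    #         template.render()
+    #     except Exception:
+    #         tb = RichTraceback()
+    #         for filename, lineno, function, line in tb.traceback:
+    #             print(filename, lineno, function, line)
+    #         print("%s: %s" % (tb.errorname, tb.message))
+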
+ def __init__(self, error=None, traceback=None):
+ self.source, self.lineno = "", 0
+
+ if error is None or traceback is None:
+ t, value, tback = sys.exc_info()
+
+ if error is None:
+ error = value or t
+
+ if traceback is None:
+ traceback = tback
+
+ self.error = error
+ self.records = self._init(traceback)
+
+ if isinstance(self.error, (CompileException, SyntaxException)):
+ self.source = self.error.source
+ self.lineno = self.error.lineno
+ self._has_source = True
+
+ self._init_message()
+
+ @property
+ def errorname(self):
+ return compat.exception_name(self.error)
+
+ def _init_message(self):
+ """Find a unicode representation of self.error"""
+ try:
+ self.message = compat.text_type(self.error)
+ except UnicodeError:
+ try:
+ self.message = str(self.error)
+ except UnicodeEncodeError:
+ # Fallback to args as neither unicode nor
+ # str(Exception(u'\xe6')) work in Python < 2.6
+ self.message = self.error.args[0]
+ if not isinstance(self.message, compat.text_type):
+ self.message = compat.text_type(self.message, "ascii", "replace")
+
+ def _get_reformatted_records(self, records):
+ for rec in records:
+ if rec[6] is not None:
+ yield (rec[4], rec[5], rec[2], rec[6])
+ else:
+ yield tuple(rec[0:4])
+
+ @property
+ def traceback(self):
+ """Return a list of 4-tuple traceback records (i.e. normal python
+ format) with template-corresponding lines remapped to the originating
+ template.
+
+ """
+ return list(self._get_reformatted_records(self.records))
+
+ @property
+ def reverse_records(self):
+ return reversed(self.records)
+
+ @property
+ def reverse_traceback(self):
+ """Return the same data as traceback, except in reverse order.
+ """
+
+ return list(self._get_reformatted_records(self.reverse_records))
+
+ def _init(self, trcback):
+ """format a traceback from sys.exc_info() into 7-item tuples,
+ containing the regular four traceback tuple items, plus the original
+ template filename, the line number adjusted relative to the template
+ source, and code line from that line number of the template."""
+
+ import mako.template
+
+ mods = {}
+ rawrecords = traceback.extract_tb(trcback)
+ new_trcback = []
+ for filename, lineno, function, line in rawrecords:
+ if not line:
+ line = ""
+ try:
+ (line_map, template_lines, template_filename) = mods[filename]
+ except KeyError:
+ try:
+ info = mako.template._get_module_info(filename)
+ module_source = info.code
+ template_source = info.source
+ template_filename = (
+ info.template_filename or info.template_uri or filename
+ )
+ except KeyError:
+ # A normal .py file (not a Template)
+ if not compat.py3k:
+ try:
+ fp = open(filename, "rb")
+ encoding = util.parse_encoding(fp)
+ fp.close()
+ except IOError:
+ encoding = None
+ if encoding:
+ line = line.decode(encoding)
+ else:
+ line = line.decode("ascii", "replace")
+ new_trcback.append(
+ (
+ filename,
+ lineno,
+ function,
+ line,
+ None,
+ None,
+ None,
+ None,
+ )
+ )
+ continue
+
+ template_ln = 1
+
+ mtm = mako.template.ModuleInfo
+ source_map = mtm.get_module_source_metadata(
+ module_source, full_line_map=True
+ )
+ line_map = source_map["full_line_map"]
+
+ template_lines = [
+ line_ for line_ in template_source.split("\n")
+ ]
+ mods[filename] = (line_map, template_lines, template_filename)
+
+ template_ln = line_map[lineno - 1]
+
+ if template_ln <= len(template_lines):
+ template_line = template_lines[template_ln - 1]
+ else:
+ template_line = None
+ new_trcback.append(
+ (
+ filename,
+ lineno,
+ function,
+ line,
+ template_filename,
+ template_ln,
+ template_line,
+ template_source,
+ )
+ )
+ if not self.source:
+ for l in range(len(new_trcback) - 1, 0, -1):
+ if new_trcback[l][5]:
+ self.source = new_trcback[l][7]
+ self.lineno = new_trcback[l][5]
+ break
+ else:
+ if new_trcback:
+ try:
+ # A normal .py file (not a Template)
+ fp = open(new_trcback[-1][0], "rb")
+ encoding = util.parse_encoding(fp)
+ if compat.py3k and not encoding:
+ encoding = "utf-8"
+ fp.seek(0)
+ self.source = fp.read()
+ fp.close()
+ if encoding:
+ self.source = self.source.decode(encoding)
+ except IOError:
+ self.source = ""
+ self.lineno = new_trcback[-1][1]
+ return new_trcback
+
+
+def text_error_template(lookup=None):
+ """Provides a template that renders a stack trace in a similar format to
+ the Python interpreter, substituting source template filenames, line
+ numbers and code for that of the originating source template, as
+ applicable.
+
+ """
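+    # Illustrative usage from an exception handler ('template' stands for
+    # any Template being rendered); with no arguments the current
+    # exception is picked up via RichTraceback:
+    #
+    #     try:
+    #         template.render()
+    #     except Exception:
+    #         print(text_error_template().render())
+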
+ import mako.template
+
+ return mako.template.Template(
+ r"""
+<%page args="error=None, traceback=None"/>
+<%!
+ from mako.exceptions import RichTraceback
+%>\
+<%
+ tback = RichTraceback(error=error, traceback=traceback)
+%>\
+Traceback (most recent call last):
+% for (filename, lineno, function, line) in tback.traceback:
+ File "${filename}", line ${lineno}, in ${function or '?'}
+ ${line | trim}
+% endfor
+${tback.errorname}: ${tback.message}
+"""
+ )
+
+
+def _install_pygments():
+ global syntax_highlight, pygments_html_formatter
+ from mako.ext.pygmentplugin import syntax_highlight # noqa
+ from mako.ext.pygmentplugin import pygments_html_formatter # noqa
+
+
+def _install_fallback():
+ global syntax_highlight, pygments_html_formatter
+ from mako.filters import html_escape
+
+ pygments_html_formatter = None
+
+ def syntax_highlight(filename="", language=None):
+ return html_escape
+
+
+def _install_highlighting():
+ try:
+ _install_pygments()
+ except ImportError:
+ _install_fallback()
+
+
+_install_highlighting()
+
+
+def html_error_template():
+ """Provides a template that renders a stack trace in an HTML format,
+ providing an excerpt of code as well as substituting source template
+ filenames, line numbers and code for that of the originating source
+ template, as applicable.
+
+ The template's default ``encoding_errors`` value is
+ ``'htmlentityreplace'``. The template has two options. With the
+ ``full`` option disabled, only a section of an HTML document is
+ returned. With the ``css`` option disabled, the default stylesheet
+ won't be included.
+
+ """
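+    # Illustrative usage ('template' stands for any Template being
+    # rendered); pass full=False to get just an HTML fragment:
+    #
+    #     try:
+    #         template.render()
+    #     except Exception:
+    #         html = html_error_template().render(full=False)
+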
+ import mako.template
+
+ return mako.template.Template(
+ r"""
+<%!
+ from mako.exceptions import RichTraceback, syntax_highlight,\
+ pygments_html_formatter
+%>
+<%page args="full=True, css=True, error=None, traceback=None"/>
+% if full:
+<html>
+<head>
+ <title>Mako Runtime Error</title>
+% endif
+% if css:
+ <style>
+ body { font-family:verdana; margin:10px 30px 10px 30px;}
+ .stacktrace { margin:5px 5px 5px 5px; }
+ .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
+ .nonhighlight { padding:0px; background-color:#DFDFDF; }
+ .sample { padding:10px; margin:10px 10px 10px 10px;
+ font-family:monospace; }
+ .sampleline { padding:0px 10px 0px 10px; }
+ .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
+ .location { font-size:80%; }
+ .highlight { white-space:pre; }
+ .sampleline { white-space:pre; }
+
+ % if pygments_html_formatter:
+ ${pygments_html_formatter.get_style_defs()}
+ .linenos { min-width: 2.5em; text-align: right; }
+ pre { margin: 0; }
+ .syntax-highlighted { padding: 0 10px; }
+ .syntax-highlightedtable { border-spacing: 1px; }
+ .nonhighlight { border-top: 1px solid #DFDFDF;
+ border-bottom: 1px solid #DFDFDF; }
+ .stacktrace .nonhighlight { margin: 5px 15px 10px; }
+ .sourceline { margin: 0 0; font-family:monospace; }
+ .code { background-color: #F8F8F8; width: 100%; }
+ .error .code { background-color: #FFBDBD; }
+ .error .syntax-highlighted { background-color: #FFBDBD; }
+ % endif
+
+ </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+ tback = RichTraceback(error=error, traceback=traceback)
+ src = tback.source
+ line = tback.lineno
+ if src:
+ lines = src.split('\n')
+ else:
+ lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+ <div class="sample">
+ <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.linenostart = index + 1
+ %>
+ % if index + 1 == line:
+ <%
+ if pygments_html_formatter:
+ old_cssclass = pygments_html_formatter.cssclass
+ pygments_html_formatter.cssclass = 'error ' + old_cssclass
+ %>
+ ${lines[index] | syntax_highlight(language='mako')}
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.cssclass = old_cssclass
+ %>
+ % else:
+ ${lines[index] | syntax_highlight(language='mako')}
+ % endif
+% endfor
+ </div>
+ </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+ <div class="location">${filename}, line ${lineno}:</div>
+ <div class="nonhighlight">
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.linenostart = lineno
+ %>
+ <div class="sourceline">${line | syntax_highlight(filename)}</div>
+ </div>
+% endfor
+</div>
+
+% if full:
+</body>
+</html>
+% endif
+""",
+ output_encoding=sys.getdefaultencoding(),
+ encoding_errors="htmlentityreplace",
+ )
diff --git a/third_party/python/Mako/mako/ext/__init__.py b/third_party/python/Mako/mako/ext/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/__init__.py
diff --git a/third_party/python/Mako/mako/ext/autohandler.py b/third_party/python/Mako/mako/ext/autohandler.py
new file mode 100644
index 0000000000..8b1324ef00
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/autohandler.py
@@ -0,0 +1,70 @@
+# ext/autohandler.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""adds autohandler functionality to Mako templates.
+
+requires that the TemplateLookup class is used with templates.
+
+usage::
+
+ <%!
+ from mako.ext.autohandler import autohandler
+ %>
+ <%inherit file="${autohandler(template, context)}"/>
+
+
+or with custom autohandler filename::
+
+ <%!
+ from mako.ext.autohandler import autohandler
+ %>
+ <%inherit file="${autohandler(template, context, name='somefilename')}"/>
+
+"""
+
+import os
+import posixpath
+import re
+
+
+def autohandler(template, context, name="autohandler"):
+ lookup = context.lookup
+ _template_uri = template.module._template_uri
+ if not lookup.filesystem_checks:
+ try:
+ return lookup._uri_cache[(autohandler, _template_uri, name)]
+ except KeyError:
+ pass
+
+ tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
+ while len(tokens):
+ path = "/" + "/".join(tokens)
+ if path != _template_uri and _file_exists(lookup, path):
+ if not lookup.filesystem_checks:
+ return lookup._uri_cache.setdefault(
+ (autohandler, _template_uri, name), path
+ )
+ else:
+ return path
+ if len(tokens) == 1:
+ break
+ tokens[-2:] = [name]
+
+ if not lookup.filesystem_checks:
+ return lookup._uri_cache.setdefault(
+ (autohandler, _template_uri, name), None
+ )
+ else:
+ return None
+
+
+def _file_exists(lookup, path):
+ psub = re.sub(r"^/", "", path)
+ for d in lookup.directories:
+ if os.path.exists(d + "/" + psub):
+ return True
+ else:
+ return False
diff --git a/third_party/python/Mako/mako/ext/babelplugin.py b/third_party/python/Mako/mako/ext/babelplugin.py
new file mode 100644
index 0000000000..76bbc5b03f
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/babelplugin.py
@@ -0,0 +1,58 @@
+# ext/babelplugin.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""gettext message extraction via Babel: http://babel.edgewall.org/"""
+from babel.messages.extract import extract_python
+
+from mako.ext.extract import MessageExtractor
+
+
+class BabelMakoExtractor(MessageExtractor):
+ def __init__(self, keywords, comment_tags, options):
+ self.keywords = keywords
+ self.options = options
+ self.config = {
+ "comment-tags": u" ".join(comment_tags),
+ "encoding": options.get(
+ "input_encoding", options.get("encoding", None)
+ ),
+ }
+ super(BabelMakoExtractor, self).__init__()
+
+ def __call__(self, fileobj):
+ return self.process_file(fileobj)
+
+ def process_python(self, code, code_lineno, translator_strings):
+ comment_tags = self.config["comment-tags"]
+ for (
+ lineno,
+ funcname,
+ messages,
+ python_translator_comments,
+ ) in extract_python(code, self.keywords, comment_tags, self.options):
+ yield (
+ code_lineno + (lineno - 1),
+ funcname,
+ messages,
+ translator_strings + python_translator_comments,
+ )
+
+
+def extract(fileobj, keywords, comment_tags, options):
+ """Extract messages from Mako templates.
+
+ :param fileobj: the file-like object the messages should be extracted from
+ :param keywords: a list of keywords (i.e. function names) that should be
+ recognized as translation functions
+ :param comment_tags: a list of translator tags to search for and include
+ in the results
+ :param options: a dictionary of additional options (optional)
+ :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
+ :rtype: ``iterator``
+ """
+ extractor = BabelMakoExtractor(keywords, comment_tags, options)
+ for message in extractor(fileobj):
+ yield message
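+
+
+# A minimal Babel mapping-file sketch (paths and options are illustrative;
+# the 'mako' extractor name is assumed to be registered via this package's
+# Babel entry point):
+#
+#     [mako: **/templates/**.html]
+#     input_encoding = utf-8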
diff --git a/third_party/python/Mako/mako/ext/beaker_cache.py b/third_party/python/Mako/mako/ext/beaker_cache.py
new file mode 100644
index 0000000000..f65ce43ae8
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/beaker_cache.py
@@ -0,0 +1,82 @@
+# ext/beaker_cache.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provide a :class:`.CacheImpl` for the Beaker caching system."""
+
+from mako import exceptions
+from mako.cache import CacheImpl
+
+try:
+ from beaker import cache as beaker_cache
+except:
+ has_beaker = False
+else:
+ has_beaker = True
+
+_beaker_cache = None
+
+
+class BeakerCacheImpl(CacheImpl):
+
+ """A :class:`.CacheImpl` provided for the Beaker caching system.
+
+ This plugin is used by default, based on the default
+ value of ``'beaker'`` for the ``cache_impl`` parameter of the
+ :class:`.Template` or :class:`.TemplateLookup` classes.
+
+ """
+
+ def __init__(self, cache):
+ if not has_beaker:
+ raise exceptions.RuntimeException(
+ "Can't initialize Beaker plugin; Beaker is not installed."
+ )
+ global _beaker_cache
+ if _beaker_cache is None:
+ if "manager" in cache.template.cache_args:
+ _beaker_cache = cache.template.cache_args["manager"]
+ else:
+ _beaker_cache = beaker_cache.CacheManager()
+ super(BeakerCacheImpl, self).__init__(cache)
+
+ def _get_cache(self, **kw):
+ expiretime = kw.pop("timeout", None)
+ if "dir" in kw:
+ kw["data_dir"] = kw.pop("dir")
+ elif self.cache.template.module_directory:
+ kw["data_dir"] = self.cache.template.module_directory
+
+ if "manager" in kw:
+ kw.pop("manager")
+
+ if kw.get("type") == "memcached":
+ kw["type"] = "ext:memcached"
+
+ if "region" in kw:
+ region = kw.pop("region")
+ cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
+ else:
+ cache = _beaker_cache.get_cache(self.cache.id, **kw)
+ cache_args = {"starttime": self.cache.starttime}
+ if expiretime:
+ cache_args["expiretime"] = expiretime
+ return cache, cache_args
+
+ def get_or_create(self, key, creation_function, **kw):
+ cache, kw = self._get_cache(**kw)
+ return cache.get(key, createfunc=creation_function, **kw)
+
+ def put(self, key, value, **kw):
+ cache, kw = self._get_cache(**kw)
+ cache.put(key, value, **kw)
+
+ def get(self, key, **kw):
+ cache, kw = self._get_cache(**kw)
+ return cache.get(key, **kw)
+
+ def invalidate(self, key, **kw):
+ cache, kw = self._get_cache(**kw)
+ cache.remove_value(key, **kw)
diff --git a/third_party/python/Mako/mako/ext/extract.py b/third_party/python/Mako/mako/ext/extract.py
new file mode 100644
index 0000000000..ad2348a523
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/extract.py
@@ -0,0 +1,125 @@
+# ext/extract.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import re
+
+from mako import compat
+from mako import lexer
+from mako import parsetree
+
+
+class MessageExtractor(object):
+ def process_file(self, fileobj):
+ template_node = lexer.Lexer(
+ fileobj.read(), input_encoding=self.config["encoding"]
+ ).parse()
+ for extracted in self.extract_nodes(template_node.get_children()):
+ yield extracted
+
+ def extract_nodes(self, nodes):
+ translator_comments = []
+ in_translator_comments = False
+ input_encoding = self.config["encoding"] or "ascii"
+ comment_tags = list(
+ filter(None, re.split(r"\s+", self.config["comment-tags"]))
+ )
+
+ for node in nodes:
+ child_nodes = None
+ if (
+ in_translator_comments
+ and isinstance(node, parsetree.Text)
+ and not node.content.strip()
+ ):
+ # Ignore whitespace within translator comments
+ continue
+
+ if isinstance(node, parsetree.Comment):
+ value = node.text.strip()
+ if in_translator_comments:
+ translator_comments.extend(
+ self._split_comment(node.lineno, value)
+ )
+ continue
+ for comment_tag in comment_tags:
+ if value.startswith(comment_tag):
+ in_translator_comments = True
+ translator_comments.extend(
+ self._split_comment(node.lineno, value)
+ )
+ continue
+
+ if isinstance(node, parsetree.DefTag):
+ code = node.function_decl.code
+ child_nodes = node.nodes
+ elif isinstance(node, parsetree.BlockTag):
+ code = node.body_decl.code
+ child_nodes = node.nodes
+ elif isinstance(node, parsetree.CallTag):
+ code = node.code.code
+ child_nodes = node.nodes
+ elif isinstance(node, parsetree.PageTag):
+ code = node.body_decl.code
+ elif isinstance(node, parsetree.CallNamespaceTag):
+ code = node.expression
+ child_nodes = node.nodes
+ elif isinstance(node, parsetree.ControlLine):
+ if node.isend:
+ in_translator_comments = False
+ continue
+ code = node.text
+ elif isinstance(node, parsetree.Code):
+ in_translator_comments = False
+ code = node.code.code
+ elif isinstance(node, parsetree.Expression):
+ code = node.code.code
+ else:
+ continue
+
+ # Comments don't apply unless they immediately precede the message
+ if (
+ translator_comments
+ and translator_comments[-1][0] < node.lineno - 1
+ ):
+ translator_comments = []
+
+ translator_strings = [
+ comment[1] for comment in translator_comments
+ ]
+
+ if isinstance(code, compat.text_type):
+ code = code.encode(input_encoding, "backslashreplace")
+
+ used_translator_comments = False
+            # We add an extra newline to work around a pybabel bug
+ # (see python-babel/babel#274, parse_encoding dies if the first
+ # input string of the input is non-ascii)
+ # Also, because we added it, we have to subtract one from
+ # node.lineno
+ code = compat.byte_buffer(compat.b("\n") + code)
+
+ for message in self.process_python(
+ code, node.lineno - 1, translator_strings
+ ):
+ yield message
+ used_translator_comments = True
+
+ if used_translator_comments:
+ translator_comments = []
+ in_translator_comments = False
+
+ if child_nodes:
+ for extracted in self.extract_nodes(child_nodes):
+ yield extracted
+
+ @staticmethod
+ def _split_comment(lineno, comment):
+ """Return the multiline comment at lineno split into a list of
+ comment line numbers and the accompanying comment line"""
+ return [
+ (lineno + index, line)
+ for index, line in enumerate(comment.splitlines())
+ ]
diff --git a/third_party/python/Mako/mako/ext/linguaplugin.py b/third_party/python/Mako/mako/ext/linguaplugin.py
new file mode 100644
index 0000000000..0f6d165a2f
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/linguaplugin.py
@@ -0,0 +1,57 @@
+# ext/linguaplugin.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import io
+
+from lingua.extractors import Extractor
+from lingua.extractors import get_extractor
+from lingua.extractors import Message
+
+from mako import compat
+from mako.ext.extract import MessageExtractor
+
+
+class LinguaMakoExtractor(Extractor, MessageExtractor):
+
+ """Mako templates"""
+
+ extensions = [".mako"]
+ default_config = {"encoding": "utf-8", "comment-tags": ""}
+
+ def __call__(self, filename, options, fileobj=None):
+ self.options = options
+ self.filename = filename
+ self.python_extractor = get_extractor("x.py")
+ if fileobj is None:
+ fileobj = open(filename, "rb")
+ return self.process_file(fileobj)
+
+ def process_python(self, code, code_lineno, translator_strings):
+ source = code.getvalue().strip()
+ if source.endswith(compat.b(":")):
+ if source in (
+ compat.b("try:"),
+ compat.b("else:"),
+ ) or source.startswith(compat.b("except")):
+ source = compat.b("") # Ignore try/except and else
+ elif source.startswith(compat.b("elif")):
+ source = source[2:] # Replace "elif" with "if"
+ source += compat.b("pass")
+ code = io.BytesIO(source)
+ for msg in self.python_extractor(
+ self.filename, self.options, code, code_lineno - 1
+ ):
+ if translator_strings:
+ msg = Message(
+ msg.msgctxt,
+ msg.msgid,
+ msg.msgid_plural,
+ msg.flags,
+ compat.u(" ").join(translator_strings + [msg.comment]),
+ msg.tcomment,
+ msg.location,
+ )
+ yield msg
diff --git a/third_party/python/Mako/mako/ext/preprocessors.py b/third_party/python/Mako/mako/ext/preprocessors.py
new file mode 100644
index 0000000000..9cc0621482
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/preprocessors.py
@@ -0,0 +1,20 @@
+# ext/preprocessors.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""preprocessing functions, used with the 'preprocessor'
+argument on Template, TemplateLookup"""
+
+import re
+
+
+def convert_comments(text):
+ """preprocess old style comments.
+
+ example:
+
+ from mako.ext.preprocessors import convert_comments
+ t = Template(..., preprocessor=convert_comments)"""
+ return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
diff --git a/third_party/python/Mako/mako/ext/pygmentplugin.py b/third_party/python/Mako/mako/ext/pygmentplugin.py
new file mode 100644
index 0000000000..943a67a49c
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/pygmentplugin.py
@@ -0,0 +1,157 @@
+# ext/pygmentplugin.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
+from mako import compat
+
+
+class MakoLexer(RegexLexer):
+ name = "Mako"
+ aliases = ["mako"]
+ filenames = ["*.mao"]
+
+ tokens = {
+ "root": [
+ (
+ r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+ bygroups(Text, Comment.Preproc, Keyword, Other),
+ ),
+ (
+ r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+ bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+ ),
+ (
+ r"(\s*)(##[^\n]*)(\n|\Z)",
+ bygroups(Text, Comment.Preproc, Other),
+ ),
+ (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+ (
+ r"(<%)([\w\.\:]+)",
+ bygroups(Comment.Preproc, Name.Builtin),
+ "tag",
+ ),
+ (
+ r"(</%)([\w\.\:]+)(>)",
+ bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+ ),
+ (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+ (
+ r"(?s)(<%(?:!?))(.*?)(%>)",
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+ ),
+ (
+ r"(\$\{)(.*?)(\})",
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+ ),
+ (
+ r"""(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
+ (?=\#\*) | # multiline comment
+ (?=</?%) | # a python block
+ # call start or end
+ (?=\$\{) | # a substitution
+ (?<=\n)(?=\s*%) |
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )
+ """,
+ bygroups(Other, Operator),
+ ),
+ (r"\s+", Text),
+ ],
+ "ondeftags": [
+ (r"<%", Comment.Preproc),
+ (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
+ include("tag"),
+ ],
+ "tag": [
+ (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
+ (r"/?\s*>", Comment.Preproc, "#pop"),
+ (r"\s+", Text),
+ ],
+ "attr": [
+ ('".*?"', String, "#pop"),
+ ("'.*?'", String, "#pop"),
+ (r"[^\s>]+", String, "#pop"),
+ ],
+ }
+
+
+class MakoHtmlLexer(DelegatingLexer):
+ name = "HTML+Mako"
+ aliases = ["html+mako"]
+
+ def __init__(self, **options):
+ super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
+
+
+class MakoXmlLexer(DelegatingLexer):
+ name = "XML+Mako"
+ aliases = ["xml+mako"]
+
+ def __init__(self, **options):
+ super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+ name = "JavaScript+Mako"
+ aliases = ["js+mako", "javascript+mako"]
+
+ def __init__(self, **options):
+ super(MakoJavascriptLexer, self).__init__(
+ JavascriptLexer, MakoLexer, **options
+ )
+
+
+class MakoCssLexer(DelegatingLexer):
+ name = "CSS+Mako"
+ aliases = ["css+mako"]
+
+ def __init__(self, **options):
+ super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
+
+
+pygments_html_formatter = HtmlFormatter(
+ cssclass="syntax-highlighted", linenos=True
+)
+
+
+def syntax_highlight(filename="", language=None):
+ mako_lexer = MakoLexer()
+ if compat.py3k:
+ python_lexer = Python3Lexer()
+ else:
+ python_lexer = PythonLexer()
+ if filename.startswith("memory:") or language == "mako":
+ return lambda string: highlight(
+ string, mako_lexer, pygments_html_formatter
+ )
+ return lambda string: highlight(
+ string, python_lexer, pygments_html_formatter
+ )
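A short sketch of how these lexers are driven (assumes Pygments is importable; the sample strings are made up): syntax_highlight() returns a callable that renders a source string to HTML.

    from mako.ext.pygmentplugin import syntax_highlight

    # language="mako" (or a filename starting with "memory:") selects the Mako
    # lexer; anything else falls back to highlighting the string as Python
    to_html = syntax_highlight(language="mako")
    print(to_html("% for x in range(3):\n  ${x}\n% endfor\n"))

    as_python = syntax_highlight("module.py")
    print(as_python("def add(a, b):\n    return a + b\n"))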
diff --git a/third_party/python/Mako/mako/ext/turbogears.py b/third_party/python/Mako/mako/ext/turbogears.py
new file mode 100644
index 0000000000..722a6b4b40
--- /dev/null
+++ b/third_party/python/Mako/mako/ext/turbogears.py
@@ -0,0 +1,61 @@
+# ext/turbogears.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat
+from mako.lookup import TemplateLookup
+from mako.template import Template
+
+
+class TGPlugin(object):
+
+ """TurboGears compatible Template Plugin."""
+
+ def __init__(self, extra_vars_func=None, options=None, extension="mak"):
+ self.extra_vars_func = extra_vars_func
+ self.extension = extension
+ if not options:
+ options = {}
+
+ # Pull the options out and initialize the lookup
+ lookup_options = {}
+ for k, v in options.items():
+ if k.startswith("mako."):
+ lookup_options[k[5:]] = v
+ elif k in ["directories", "filesystem_checks", "module_directory"]:
+ lookup_options[k] = v
+ self.lookup = TemplateLookup(**lookup_options)
+
+ self.tmpl_options = {}
+ # transfer lookup args to template args, based on those available
+ # in getargspec
+ for kw in compat.inspect_getargspec(Template.__init__)[0]:
+ if kw in lookup_options:
+ self.tmpl_options[kw] = lookup_options[kw]
+
+ def load_template(self, templatename, template_string=None):
+ """Loads a template from a file or a string"""
+ if template_string is not None:
+ return Template(template_string, **self.tmpl_options)
+ # Translate TG dot notation to normal / template path
+ if "/" not in templatename:
+ templatename = (
+ "/" + templatename.replace(".", "/") + "." + self.extension
+ )
+
+ # Lookup template
+ return self.lookup.get_template(templatename)
+
+ def render(
+ self, info, format="html", fragment=False, template=None # noqa
+ ):
+ if isinstance(template, compat.string_types):
+ template = self.load_template(template)
+
+ # Load extra vars func if provided
+ if self.extra_vars_func:
+ info.update(self.extra_vars_func())
+
+ return template.render(**info)
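A hedged usage sketch for the plugin; the directory and template names below are hypothetical.

    from mako.ext.turbogears import TGPlugin

    # "directories" is one of the keys forwarded to the internal TemplateLookup
    plugin = TGPlugin(options={"directories": ["/path/to/templates"]},
                      extension="mak")

    # TurboGears dotted names are translated to lookup paths,
    # e.g. "site.index" -> "/site/index.mak"
    html = plugin.render({"title": "Hello"}, template="site.index")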
diff --git a/third_party/python/Mako/mako/filters.py b/third_party/python/Mako/mako/filters.py
new file mode 100644
index 0000000000..0ae33ff486
--- /dev/null
+++ b/third_party/python/Mako/mako/filters.py
@@ -0,0 +1,219 @@
+# mako/filters.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import codecs
+import re
+
+from mako import compat
+from mako.compat import codepoint2name
+from mako.compat import name2codepoint
+from mako.compat import quote_plus
+from mako.compat import unquote_plus
+
+xml_escapes = {
+ "&": "&amp;",
+ ">": "&gt;",
+ "<": "&lt;",
+ '"': "&#34;", # also &quot; in html-only
+ "'": "&#39;", # also &apos; in html-only
+}
+
+# XXX: &quot; is valid in HTML and XML
+# &apos; is not valid HTML, but is valid XML
+
+
+def legacy_html_escape(s):
+ """legacy HTML escape for non-unicode mode."""
+ s = s.replace("&", "&amp;")
+ s = s.replace(">", "&gt;")
+ s = s.replace("<", "&lt;")
+ s = s.replace('"', "&#34;")
+ s = s.replace("'", "&#39;")
+ return s
+
+
+try:
+ import markupsafe
+
+ html_escape = markupsafe.escape
+except ImportError:
+ html_escape = legacy_html_escape
+
+
+def xml_escape(string):
+ return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
+
+
+def url_escape(string):
+ # convert into a list of octets
+ string = string.encode("utf8")
+ return quote_plus(string)
+
+
+def legacy_url_escape(string):
+ # convert into a list of octets
+ return quote_plus(string)
+
+
+def url_unescape(string):
+ text = unquote_plus(string)
+ if not is_ascii_str(text):
+ text = text.decode("utf8")
+ return text
+
+
+def trim(string):
+ return string.strip()
+
+
+class Decode(object):
+ def __getattr__(self, key):
+ def decode(x):
+ if isinstance(x, compat.text_type):
+ return x
+ elif not isinstance(x, compat.binary_type):
+ return decode(str(x))
+ else:
+ return compat.text_type(x, encoding=key)
+
+ return decode
+
+
+decode = Decode()
+
+
+_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
+
+
+def is_ascii_str(text):
+ return isinstance(text, str) and _ASCII_re.match(text)
+
+
+################################################################
+
+
+class XMLEntityEscaper(object):
+ def __init__(self, codepoint2name, name2codepoint):
+ self.codepoint2entity = dict(
+ [
+ (c, compat.text_type("&%s;" % n))
+ for c, n in codepoint2name.items()
+ ]
+ )
+ self.name2codepoint = name2codepoint
+
+ def escape_entities(self, text):
+ """Replace characters with their character entity references.
+
+ Only characters corresponding to a named entity are replaced.
+ """
+ return compat.text_type(text).translate(self.codepoint2entity)
+
+ def __escape(self, m):
+ codepoint = ord(m.group())
+ try:
+ return self.codepoint2entity[codepoint]
+ except (KeyError, IndexError):
+ return "&#x%X;" % codepoint
+
+ __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
+
+ def escape(self, text):
+ """Replace characters with their character references.
+
+ Replace characters by their named entity references.
+ Non-ASCII characters, if they do not have a named entity reference,
+ are replaced by numerical character references.
+
+ The return value is guaranteed to be ASCII.
+ """
+ return self.__escapable.sub(
+ self.__escape, compat.text_type(text)
+ ).encode("ascii")
+
+ # XXX: This regexp will not match all valid XML entity names__.
+    # (It punts on details involving CombiningChars and Extenders.)
+ #
+ # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
+ __characterrefs = re.compile(
+ r"""& (?:
+ \#(\d+)
+ | \#x([\da-f]+)
+ | ( (?!\d) [:\w] [-.:\w]+ )
+ ) ;""",
+ re.X | re.UNICODE,
+ )
+
+ def __unescape(self, m):
+ dval, hval, name = m.groups()
+ if dval:
+ codepoint = int(dval)
+ elif hval:
+ codepoint = int(hval, 16)
+ else:
+ codepoint = self.name2codepoint.get(name, 0xFFFD)
+ # U+FFFD = "REPLACEMENT CHARACTER"
+ if codepoint < 128:
+ return chr(codepoint)
+ return chr(codepoint)
+
+ def unescape(self, text):
+ """Unescape character references.
+
+ All character references (both entity references and numerical
+ character references) are unescaped.
+ """
+ return self.__characterrefs.sub(self.__unescape, text)
+
+
+_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
+
+html_entities_escape = _html_entities_escaper.escape_entities
+html_entities_unescape = _html_entities_escaper.unescape
+
+
+def htmlentityreplace_errors(ex):
+ """An encoding error handler.
+
+ This python codecs error handler replaces unencodable
+ characters with HTML entities, or, if no HTML entity exists for
+ the character, XML character references::
+
+ >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+ 'The cost was &euro;12.'
+ """
+ if isinstance(ex, UnicodeEncodeError):
+ # Handle encoding errors
+ bad_text = ex.object[ex.start : ex.end]
+ text = _html_entities_escaper.escape(bad_text)
+ return (compat.text_type(text), ex.end)
+ raise ex
+
+
+codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
+
+
+# TODO: options to make this dynamic per-compilation will be added in a later
+# release
+DEFAULT_ESCAPES = {
+ "x": "filters.xml_escape",
+ "h": "filters.html_escape",
+ "u": "filters.url_escape",
+ "trim": "filters.trim",
+ "entity": "filters.html_entities_escape",
+ "unicode": "unicode",
+ "decode": "decode",
+ "str": "str",
+ "n": "n",
+}
+
+if compat.py3k:
+ DEFAULT_ESCAPES.update({"unicode": "str"})
+
+NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
+NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape"
+NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape"
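A few sketch calls showing the filter functions behind the single-letter escape names ("x", "h", "u", "entity") used in ${expr | h} expressions; the expected values are noted as comments and assume UTF-8 input.

    from mako import filters

    filters.xml_escape('<a href="x">&</a>')
    # -> '&lt;a href=&#34;x&#34;&gt;&amp;&lt;/a&gt;'

    filters.url_escape("a b/c")
    # -> 'a+b%2Fc'

    filters.html_entities_escape(u"\u00e9 < 10")
    # -> '&eacute; &lt; 10' (named entities where one exists)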
diff --git a/third_party/python/Mako/mako/lexer.py b/third_party/python/Mako/mako/lexer.py
new file mode 100644
index 0000000000..a02b57f8a1
--- /dev/null
+++ b/third_party/python/Mako/mako/lexer.py
@@ -0,0 +1,490 @@
+# mako/lexer.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides the Lexer class for parsing template strings into parse trees."""
+
+import codecs
+import re
+
+from mako import compat
+from mako import exceptions
+from mako import parsetree
+from mako.pygen import adjust_whitespace
+
+_regexp_cache = {}
+
+
+class Lexer(object):
+ def __init__(
+ self,
+ text,
+ filename=None,
+ disable_unicode=False,
+ input_encoding=None,
+ preprocessor=None,
+ ):
+ self.text = text
+ self.filename = filename
+ self.template = parsetree.TemplateNode(self.filename)
+ self.matched_lineno = 1
+ self.matched_charpos = 0
+ self.lineno = 1
+ self.match_position = 0
+ self.tag = []
+ self.control_line = []
+ self.ternary_stack = []
+ self.disable_unicode = disable_unicode
+ self.encoding = input_encoding
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not " "support disabling Unicode"
+ )
+
+ if preprocessor is None:
+ self.preprocessor = []
+ elif not hasattr(preprocessor, "__iter__"):
+ self.preprocessor = [preprocessor]
+ else:
+ self.preprocessor = preprocessor
+
+ @property
+ def exception_kwargs(self):
+ return {
+ "source": self.text,
+ "lineno": self.matched_lineno,
+ "pos": self.matched_charpos,
+ "filename": self.filename,
+ }
+
+ def match(self, regexp, flags=None):
+ """compile the given regexp, cache the reg, and call match_reg()."""
+
+ try:
+ reg = _regexp_cache[(regexp, flags)]
+ except KeyError:
+ if flags:
+ reg = re.compile(regexp, flags)
+ else:
+ reg = re.compile(regexp)
+ _regexp_cache[(regexp, flags)] = reg
+
+ return self.match_reg(reg)
+
+ def match_reg(self, reg):
+ """match the given regular expression object to the current text
+ position.
+
+ if a match occurs, update the current text and line position.
+
+ """
+
+ mp = self.match_position
+
+ match = reg.match(self.text, self.match_position)
+ if match:
+ (start, end) = match.span()
+ if end == start:
+ self.match_position = end + 1
+ else:
+ self.match_position = end
+ self.matched_lineno = self.lineno
+ lines = re.findall(r"\n", self.text[mp : self.match_position])
+ cp = mp - 1
+ while cp >= 0 and cp < self.textlength and self.text[cp] != "\n":
+ cp -= 1
+ self.matched_charpos = mp - cp
+ self.lineno += len(lines)
+ # print "MATCHED:", match.group(0), "LINE START:",
+ # self.matched_lineno, "LINE END:", self.lineno
+ # print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
+ # (match and "TRUE" or "FALSE")
+ return match
+
+ def parse_until_text(self, watch_nesting, *text):
+ startpos = self.match_position
+ text_re = r"|".join(text)
+ brace_level = 0
+ paren_level = 0
+ bracket_level = 0
+ while True:
+ match = self.match(r"#.*\n")
+ if match:
+ continue
+ match = self.match(
+ r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S
+ )
+ if match:
+ continue
+ match = self.match(r"(%s)" % text_re)
+ if match and not (
+ watch_nesting
+ and (brace_level > 0 or paren_level > 0 or bracket_level > 0)
+ ):
+ return (
+ self.text[
+ startpos : self.match_position - len(match.group(1))
+ ],
+ match.group(1),
+ )
+ elif not match:
+ match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
+ if match:
+ brace_level += match.group(1).count("{")
+ brace_level -= match.group(1).count("}")
+ paren_level += match.group(1).count("(")
+ paren_level -= match.group(1).count(")")
+ bracket_level += match.group(1).count("[")
+ bracket_level -= match.group(1).count("]")
+ continue
+ raise exceptions.SyntaxException(
+ "Expected: %s" % ",".join(text), **self.exception_kwargs
+ )
+
+ def append_node(self, nodecls, *args, **kwargs):
+ kwargs.setdefault("source", self.text)
+ kwargs.setdefault("lineno", self.matched_lineno)
+ kwargs.setdefault("pos", self.matched_charpos)
+ kwargs["filename"] = self.filename
+ node = nodecls(*args, **kwargs)
+ if len(self.tag):
+ self.tag[-1].nodes.append(node)
+ else:
+ self.template.nodes.append(node)
+ # build a set of child nodes for the control line
+ # (used for loop variable detection)
+ # also build a set of child nodes on ternary control lines
+        # (used for determining if a pass needs to be auto-inserted)
+ if self.control_line:
+ control_frame = self.control_line[-1]
+ control_frame.nodes.append(node)
+ if not (
+ isinstance(node, parsetree.ControlLine)
+ and control_frame.is_ternary(node.keyword)
+ ):
+ if self.ternary_stack and self.ternary_stack[-1]:
+ self.ternary_stack[-1][-1].nodes.append(node)
+ if isinstance(node, parsetree.Tag):
+ if len(self.tag):
+ node.parent = self.tag[-1]
+ self.tag.append(node)
+ elif isinstance(node, parsetree.ControlLine):
+ if node.isend:
+ self.control_line.pop()
+ self.ternary_stack.pop()
+ elif node.is_primary:
+ self.control_line.append(node)
+ self.ternary_stack.append([])
+ elif self.control_line and self.control_line[-1].is_ternary(
+ node.keyword
+ ):
+ self.ternary_stack[-1].append(node)
+ elif self.control_line and not self.control_line[-1].is_ternary(
+ node.keyword
+ ):
+ raise exceptions.SyntaxException(
+ "Keyword '%s' not a legal ternary for keyword '%s'"
+ % (node.keyword, self.control_line[-1].keyword),
+ **self.exception_kwargs
+ )
+
+ _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n")
+
+ def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
+ """given string/unicode or bytes/string, determine encoding
+ from magic encoding comment, return body as unicode
+ or raw if decode_raw=False
+
+ """
+ if isinstance(text, compat.text_type):
+ m = self._coding_re.match(text)
+ encoding = m and m.group(1) or known_encoding or "ascii"
+ return encoding, text
+
+ if text.startswith(codecs.BOM_UTF8):
+ text = text[len(codecs.BOM_UTF8) :]
+ parsed_encoding = "utf-8"
+ m = self._coding_re.match(text.decode("utf-8", "ignore"))
+ if m is not None and m.group(1) != "utf-8":
+ raise exceptions.CompileException(
+ "Found utf-8 BOM in file, with conflicting "
+ "magic encoding comment of '%s'" % m.group(1),
+ text.decode("utf-8", "ignore"),
+ 0,
+ 0,
+ filename,
+ )
+ else:
+ m = self._coding_re.match(text.decode("utf-8", "ignore"))
+ if m:
+ parsed_encoding = m.group(1)
+ else:
+ parsed_encoding = known_encoding or "ascii"
+
+ if decode_raw:
+ try:
+ text = text.decode(parsed_encoding)
+ except UnicodeDecodeError:
+ raise exceptions.CompileException(
+ "Unicode decode operation of encoding '%s' failed"
+ % parsed_encoding,
+ text.decode("utf-8", "ignore"),
+ 0,
+ 0,
+ filename,
+ )
+
+ return parsed_encoding, text
+
+ def parse(self):
+ self.encoding, self.text = self.decode_raw_stream(
+ self.text, not self.disable_unicode, self.encoding, self.filename
+ )
+
+ for preproc in self.preprocessor:
+ self.text = preproc(self.text)
+
+ # push the match marker past the
+ # encoding comment.
+ self.match_reg(self._coding_re)
+
+ self.textlength = len(self.text)
+
+ while True:
+ if self.match_position > self.textlength:
+ break
+
+ if self.match_end():
+ break
+ if self.match_expression():
+ continue
+ if self.match_control_line():
+ continue
+ if self.match_comment():
+ continue
+ if self.match_tag_start():
+ continue
+ if self.match_tag_end():
+ continue
+ if self.match_python_block():
+ continue
+ if self.match_text():
+ continue
+
+ if self.match_position > self.textlength:
+ break
+ raise exceptions.CompileException("assertion failed")
+
+ if len(self.tag):
+ raise exceptions.SyntaxException(
+ "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+ **self.exception_kwargs
+ )
+ if len(self.control_line):
+ raise exceptions.SyntaxException(
+ "Unterminated control keyword: '%s'"
+ % self.control_line[-1].keyword,
+ self.text,
+ self.control_line[-1].lineno,
+ self.control_line[-1].pos,
+ self.filename,
+ )
+ return self.template
+
+ def match_tag_start(self):
+ match = self.match(
+ r"""
+ \<% # opening tag
+
+ ([\w\.\:]+) # keyword
+
+ ((?:\s+\w+|\s*=\s*|".*?"|'.*?')*) # attrname, = \
+ # sign, string expression
+
+ \s* # more whitespace
+
+ (/)?> # closing
+
+ """,
+ re.I | re.S | re.X,
+ )
+
+ if match:
+ keyword, attr, isend = match.groups()
+ self.keyword = keyword
+ attributes = {}
+ if attr:
+ for att in re.findall(
+ r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+ ):
+ key, val1, val2 = att
+ text = val1 or val2
+ text = text.replace("\r\n", "\n")
+ attributes[key] = text
+ self.append_node(parsetree.Tag, keyword, attributes)
+ if isend:
+ self.tag.pop()
+ else:
+ if keyword == "text":
+ match = self.match(r"(.*?)(?=\</%text>)", re.S)
+ if not match:
+ raise exceptions.SyntaxException(
+ "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+ **self.exception_kwargs
+ )
+ self.append_node(parsetree.Text, match.group(1))
+ return self.match_tag_end()
+ return True
+ else:
+ return False
+
+ def match_tag_end(self):
+ match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
+ if match:
+ if not len(self.tag):
+ raise exceptions.SyntaxException(
+ "Closing tag without opening tag: </%%%s>"
+ % match.group(1),
+ **self.exception_kwargs
+ )
+ elif self.tag[-1].keyword != match.group(1):
+ raise exceptions.SyntaxException(
+ "Closing tag </%%%s> does not match tag: <%%%s>"
+ % (match.group(1), self.tag[-1].keyword),
+ **self.exception_kwargs
+ )
+ self.tag.pop()
+ return True
+ else:
+ return False
+
+ def match_end(self):
+ match = self.match(r"\Z", re.S)
+ if match:
+ string = match.group()
+ if string:
+ return string
+ else:
+ return True
+ else:
+ return False
+
+ def match_text(self):
+ match = self.match(
+ r"""
+ (.*?) # anything, followed by:
+ (
+ (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
+ # comment preceded by a
+ # consumed newline and whitespace
+ |
+ (?=\${) # an expression
+ |
+ (?=</?[%&]) # a substitution or block or call start or end
+ # - don't consume
+ |
+ (\\\r?\n) # an escaped newline - throw away
+ |
+ \Z # end of string
+ )""",
+ re.X | re.S,
+ )
+
+ if match:
+ text = match.group(1)
+ if text:
+ self.append_node(parsetree.Text, text)
+ return True
+ else:
+ return False
+
+ def match_python_block(self):
+ match = self.match(r"<%(!)?")
+ if match:
+ line, pos = self.matched_lineno, self.matched_charpos
+ text, end = self.parse_until_text(False, r"%>")
+ # the trailing newline helps
+ # compiler.parse() not complain about indentation
+ text = adjust_whitespace(text) + "\n"
+ self.append_node(
+ parsetree.Code,
+ text,
+ match.group(1) == "!",
+ lineno=line,
+ pos=pos,
+ )
+ return True
+ else:
+ return False
+
+ def match_expression(self):
+ match = self.match(r"\${")
+ if match:
+ line, pos = self.matched_lineno, self.matched_charpos
+ text, end = self.parse_until_text(True, r"\|", r"}")
+ if end == "|":
+ escapes, end = self.parse_until_text(True, r"}")
+ else:
+ escapes = ""
+ text = text.replace("\r\n", "\n")
+ self.append_node(
+ parsetree.Expression,
+ text,
+ escapes.strip(),
+ lineno=line,
+ pos=pos,
+ )
+ return True
+ else:
+ return False
+
+ def match_control_line(self):
+ match = self.match(
+ r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
+ r"(?:\r?\n|\Z)",
+ re.M,
+ )
+ if match:
+ operator = match.group(1)
+ text = match.group(2)
+ if operator == "%":
+ m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
+ if not m2:
+ raise exceptions.SyntaxException(
+ "Invalid control line: '%s'" % text,
+ **self.exception_kwargs
+ )
+ isend, keyword = m2.group(1, 2)
+ isend = isend is not None
+
+ if isend:
+ if not len(self.control_line):
+ raise exceptions.SyntaxException(
+ "No starting keyword '%s' for '%s'"
+ % (keyword, text),
+ **self.exception_kwargs
+ )
+ elif self.control_line[-1].keyword != keyword:
+ raise exceptions.SyntaxException(
+ "Keyword '%s' doesn't match keyword '%s'"
+ % (text, self.control_line[-1].keyword),
+ **self.exception_kwargs
+ )
+ self.append_node(parsetree.ControlLine, keyword, isend, text)
+ else:
+ self.append_node(parsetree.Comment, text)
+ return True
+ else:
+ return False
+
+ def match_comment(self):
+ """matches the multiline version of a comment"""
+ match = self.match(r"<%doc>(.*?)</%doc>", re.S)
+ if match:
+ self.append_node(parsetree.Comment, match.group(1))
+ return True
+ else:
+ return False
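A small sketch of driving the lexer directly (the template string is made up); Lexer.parse() returns the parsetree.TemplateNode assembled by append_node().

    from mako.lexer import Lexer

    tree = Lexer("hello ${name}\n% if name:\nhi\n% endif\n").parse()
    for node in tree.get_children():
        # prints node types such as Text, Expression and ControlLine
        print(type(node).__name__, getattr(node, "text", ""))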
diff --git a/third_party/python/Mako/mako/lookup.py b/third_party/python/Mako/mako/lookup.py
new file mode 100644
index 0000000000..476326d4d2
--- /dev/null
+++ b/third_party/python/Mako/mako/lookup.py
@@ -0,0 +1,372 @@
+# mako/lookup.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import os
+import posixpath
+import re
+import stat
+
+from mako import exceptions
+from mako import util
+from mako.template import Template
+
+try:
+ import threading
+except:
+ import dummy_threading as threading
+
+
+class TemplateCollection(object):
+
+ """Represent a collection of :class:`.Template` objects,
+ identifiable via URI.
+
+ A :class:`.TemplateCollection` is linked to the usage of
+ all template tags that address other templates, such
+ as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
+ The ``file`` attribute of each of those tags refers
+ to a string URI that is passed to that :class:`.Template`
+ object's :class:`.TemplateCollection` for resolution.
+
+ :class:`.TemplateCollection` is an abstract class,
+ with the usual default implementation being :class:`.TemplateLookup`.
+
+ """
+
+ def has_template(self, uri):
+ """Return ``True`` if this :class:`.TemplateLookup` is
+ capable of returning a :class:`.Template` object for the
+ given ``uri``.
+
+ :param uri: String URI of the template to be resolved.
+
+ """
+ try:
+ self.get_template(uri)
+ return True
+ except exceptions.TemplateLookupException:
+ return False
+
+ def get_template(self, uri, relativeto=None):
+ """Return a :class:`.Template` object corresponding to the given
+ ``uri``.
+
+ The default implementation raises
+ :class:`.NotImplementedError`. Implementations should
+ raise :class:`.TemplateLookupException` if the given ``uri``
+ cannot be resolved.
+
+ :param uri: String URI of the template to be resolved.
+ :param relativeto: if present, the given ``uri`` is assumed to
+ be relative to this URI.
+
+ """
+ raise NotImplementedError()
+
+ def filename_to_uri(self, uri, filename):
+ """Convert the given ``filename`` to a URI relative to
+ this :class:`.TemplateCollection`."""
+
+ return uri
+
+ def adjust_uri(self, uri, filename):
+ """Adjust the given ``uri`` based on the calling ``filename``.
+
+ When this method is called from the runtime, the
+        ``filename`` parameter is taken directly from the ``filename``
+ attribute of the calling template. Therefore a custom
+ :class:`.TemplateCollection` subclass can place any string
+ identifier desired in the ``filename`` parameter of the
+ :class:`.Template` objects it constructs and have them come back
+ here.
+
+ """
+ return uri
+
+
+class TemplateLookup(TemplateCollection):
+
+ """Represent a collection of templates that locates template source files
+ from the local filesystem.
+
+ The primary argument is the ``directories`` argument, the list of
+ directories to search:
+
+ .. sourcecode:: python
+
+ lookup = TemplateLookup(["/path/to/templates"])
+ some_template = lookup.get_template("/index.html")
+
+ The :class:`.TemplateLookup` can also be given :class:`.Template` objects
+    programmatically using :meth:`.put_string` or :meth:`.put_template`:
+
+ .. sourcecode:: python
+
+ lookup = TemplateLookup()
+ lookup.put_string("base.html", '''
+ <html><body>${self.next()}</body></html>
+ ''')
+ lookup.put_string("hello.html", '''
+ <%include file='base.html'/>
+
+ Hello, world !
+ ''')
+
+
+ :param directories: A list of directory names which will be
+ searched for a particular template URI. The URI is appended
+ to each directory and the filesystem checked.
+
+ :param collection_size: Approximate size of the collection used
+ to store templates. If left at its default of ``-1``, the size
+ is unbounded, and a plain Python dictionary is used to
+ relate URI strings to :class:`.Template` instances.
+ Otherwise, a least-recently-used cache object is used which
+ will maintain the size of the collection approximately to
+ the number given.
+
+ :param filesystem_checks: When at its default value of ``True``,
+ each call to :meth:`.TemplateLookup.get_template()` will
+ compare the filesystem last modified time to the time in
+ which an existing :class:`.Template` object was created.
+ This allows the :class:`.TemplateLookup` to regenerate a
+ new :class:`.Template` whenever the original source has
+ been updated. Set this to ``False`` for a very minor
+ performance increase.
+
+ :param modulename_callable: A callable which, when present,
+ is passed the path of the source file as well as the
+ requested URI, and then returns the full path of the
+ generated Python module file. This is used to inject
+ alternate schemes for Python module location. If left at
+ its default of ``None``, the built in system of generation
+ based on ``module_directory`` plus ``uri`` is used.
+
+ All other keyword parameters available for
+ :class:`.Template` are mirrored here. When new
+ :class:`.Template` objects are created, the keywords
+ established with this :class:`.TemplateLookup` are passed on
+ to each new :class:`.Template`.
+
+ """
+
+ def __init__(
+ self,
+ directories=None,
+ module_directory=None,
+ filesystem_checks=True,
+ collection_size=-1,
+ format_exceptions=False,
+ error_handler=None,
+ disable_unicode=False,
+ bytestring_passthrough=False,
+ output_encoding=None,
+ encoding_errors="strict",
+ cache_args=None,
+ cache_impl="beaker",
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+ modulename_callable=None,
+ module_writer=None,
+ default_filters=None,
+ buffer_filters=(),
+ strict_undefined=False,
+ imports=None,
+ future_imports=None,
+ enable_loop=True,
+ input_encoding=None,
+ preprocessor=None,
+ lexer_cls=None,
+ include_error_handler=None,
+ ):
+
+ self.directories = [
+ posixpath.normpath(d) for d in util.to_list(directories, ())
+ ]
+ self.module_directory = module_directory
+ self.modulename_callable = modulename_callable
+ self.filesystem_checks = filesystem_checks
+ self.collection_size = collection_size
+
+ if cache_args is None:
+ cache_args = {}
+ # transfer deprecated cache_* args
+ if cache_dir:
+ cache_args.setdefault("dir", cache_dir)
+ if cache_url:
+ cache_args.setdefault("url", cache_url)
+ if cache_type:
+ cache_args.setdefault("type", cache_type)
+
+ self.template_args = {
+ "format_exceptions": format_exceptions,
+ "error_handler": error_handler,
+ "include_error_handler": include_error_handler,
+ "disable_unicode": disable_unicode,
+ "bytestring_passthrough": bytestring_passthrough,
+ "output_encoding": output_encoding,
+ "cache_impl": cache_impl,
+ "encoding_errors": encoding_errors,
+ "input_encoding": input_encoding,
+ "module_directory": module_directory,
+ "module_writer": module_writer,
+ "cache_args": cache_args,
+ "cache_enabled": cache_enabled,
+ "default_filters": default_filters,
+ "buffer_filters": buffer_filters,
+ "strict_undefined": strict_undefined,
+ "imports": imports,
+ "future_imports": future_imports,
+ "enable_loop": enable_loop,
+ "preprocessor": preprocessor,
+ "lexer_cls": lexer_cls,
+ }
+
+ if collection_size == -1:
+ self._collection = {}
+ self._uri_cache = {}
+ else:
+ self._collection = util.LRUCache(collection_size)
+ self._uri_cache = util.LRUCache(collection_size)
+ self._mutex = threading.Lock()
+
+ def get_template(self, uri):
+ """Return a :class:`.Template` object corresponding to the given
+ ``uri``.
+
+ .. note:: The ``relativeto`` argument is not supported here at
+ the moment.
+
+ """
+
+ try:
+ if self.filesystem_checks:
+ return self._check(uri, self._collection[uri])
+ else:
+ return self._collection[uri]
+ except KeyError:
+ u = re.sub(r"^\/+", "", uri)
+ for dir_ in self.directories:
+                # make sure the path separators are posix - os.altsep is empty
+ # on POSIX and cannot be used.
+ dir_ = dir_.replace(os.path.sep, posixpath.sep)
+ srcfile = posixpath.normpath(posixpath.join(dir_, u))
+ if os.path.isfile(srcfile):
+ return self._load(srcfile, uri)
+ else:
+ raise exceptions.TopLevelLookupException(
+ "Cant locate template for uri %r" % uri
+ )
+
+ def adjust_uri(self, uri, relativeto):
+ """Adjust the given ``uri`` based on the given relative URI."""
+
+ key = (uri, relativeto)
+ if key in self._uri_cache:
+ return self._uri_cache[key]
+
+ if uri[0] != "/":
+ if relativeto is not None:
+ v = self._uri_cache[key] = posixpath.join(
+ posixpath.dirname(relativeto), uri
+ )
+ else:
+ v = self._uri_cache[key] = "/" + uri
+ else:
+ v = self._uri_cache[key] = uri
+ return v
+
+ def filename_to_uri(self, filename):
+ """Convert the given ``filename`` to a URI relative to
+ this :class:`.TemplateCollection`."""
+
+ try:
+ return self._uri_cache[filename]
+ except KeyError:
+ value = self._relativeize(filename)
+ self._uri_cache[filename] = value
+ return value
+
+ def _relativeize(self, filename):
+ """Return the portion of a filename that is 'relative'
+ to the directories in this lookup.
+
+ """
+
+ filename = posixpath.normpath(filename)
+ for dir_ in self.directories:
+ if filename[0 : len(dir_)] == dir_:
+ return filename[len(dir_) :]
+ else:
+ return None
+
+ def _load(self, filename, uri):
+ self._mutex.acquire()
+ try:
+ try:
+ # try returning from collection one
+ # more time in case concurrent thread already loaded
+ return self._collection[uri]
+ except KeyError:
+ pass
+ try:
+ if self.modulename_callable is not None:
+ module_filename = self.modulename_callable(filename, uri)
+ else:
+ module_filename = None
+ self._collection[uri] = template = Template(
+ uri=uri,
+ filename=posixpath.normpath(filename),
+ lookup=self,
+ module_filename=module_filename,
+ **self.template_args
+ )
+ return template
+ except:
+ # if compilation fails etc, ensure
+ # template is removed from collection,
+ # re-raise
+ self._collection.pop(uri, None)
+ raise
+ finally:
+ self._mutex.release()
+
+ def _check(self, uri, template):
+ if template.filename is None:
+ return template
+
+ try:
+ template_stat = os.stat(template.filename)
+ if template.module._modified_time < template_stat[stat.ST_MTIME]:
+ self._collection.pop(uri, None)
+ return self._load(template.filename, uri)
+ else:
+ return template
+ except OSError:
+ self._collection.pop(uri, None)
+ raise exceptions.TemplateLookupException(
+ "Cant locate template for uri %r" % uri
+ )
+
+ def put_string(self, uri, text):
+ """Place a new :class:`.Template` object into this
+ :class:`.TemplateLookup`, based on the given string of
+ ``text``.
+
+ """
+ self._collection[uri] = Template(
+ text, lookup=self, uri=uri, **self.template_args
+ )
+
+ def put_template(self, uri, template):
+ """Place a new :class:`.Template` object into this
+ :class:`.TemplateLookup`, based on the given
+ :class:`.Template` object.
+
+ """
+ self._collection[uri] = template
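A usage sketch for TemplateLookup; the directory and module paths are hypothetical.

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup(
        directories=["/path/to/templates"],    # searched by get_template()
        module_directory="/tmp/mako_modules",  # where generated modules are written
        collection_size=500,                   # LRU cache instead of a plain dict
        filesystem_checks=False,               # skip per-lookup mtime checks
    )

    # templates can also be registered directly from strings
    lookup.put_string("/hello.txt", "hello ${name}!")
    print(lookup.get_template("/hello.txt").render(name="world"))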
diff --git a/third_party/python/Mako/mako/parsetree.py b/third_party/python/Mako/mako/parsetree.py
new file mode 100644
index 0000000000..801e48a7f4
--- /dev/null
+++ b/third_party/python/Mako/mako/parsetree.py
@@ -0,0 +1,665 @@
+# mako/parsetree.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""defines the parse tree components for Mako templates."""
+
+import re
+
+from mako import ast
+from mako import compat
+from mako import exceptions
+from mako import filters
+from mako import util
+
+
+class Node(object):
+
+ """base class for a Node in the parse tree."""
+
+ def __init__(self, source, lineno, pos, filename):
+ self.source = source
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+
+ @property
+ def exception_kwargs(self):
+ return {
+ "source": self.source,
+ "lineno": self.lineno,
+ "pos": self.pos,
+ "filename": self.filename,
+ }
+
+ def get_children(self):
+ return []
+
+ def accept_visitor(self, visitor):
+ def traverse(node):
+ for n in node.get_children():
+ n.accept_visitor(visitor)
+
+ method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
+ method(self)
+
+
+class TemplateNode(Node):
+
+ """a 'container' node that stores the overall collection of nodes."""
+
+ def __init__(self, filename):
+ super(TemplateNode, self).__init__("", 0, 0, filename)
+ self.nodes = []
+ self.page_attributes = {}
+
+ def get_children(self):
+ return self.nodes
+
+ def __repr__(self):
+ return "TemplateNode(%s, %r)" % (
+ util.sorted_dict_repr(self.page_attributes),
+ self.nodes,
+ )
+
+
+class ControlLine(Node):
+
+ """defines a control line, a line-oriented python line or end tag.
+
+ e.g.::
+
+ % if foo:
+ (markup)
+ % endif
+
+ """
+
+ has_loop_context = False
+
+ def __init__(self, keyword, isend, text, **kwargs):
+ super(ControlLine, self).__init__(**kwargs)
+ self.text = text
+ self.keyword = keyword
+ self.isend = isend
+ self.is_primary = keyword in ["for", "if", "while", "try", "with"]
+ self.nodes = []
+ if self.isend:
+ self._declared_identifiers = []
+ self._undeclared_identifiers = []
+ else:
+ code = ast.PythonFragment(text, **self.exception_kwargs)
+ self._declared_identifiers = code.declared_identifiers
+ self._undeclared_identifiers = code.undeclared_identifiers
+
+ def get_children(self):
+ return self.nodes
+
+ def declared_identifiers(self):
+ return self._declared_identifiers
+
+ def undeclared_identifiers(self):
+ return self._undeclared_identifiers
+
+ def is_ternary(self, keyword):
+ """return true if the given keyword is a ternary keyword
+ for this ControlLine"""
+
+ return keyword in {
+ "if": set(["else", "elif"]),
+ "try": set(["except", "finally"]),
+ "for": set(["else"]),
+ }.get(self.keyword, [])
+
+ def __repr__(self):
+ return "ControlLine(%r, %r, %r, %r)" % (
+ self.keyword,
+ self.text,
+ self.isend,
+ (self.lineno, self.pos),
+ )
+
+
+class Text(Node):
+
+ """defines plain text in the template."""
+
+ def __init__(self, content, **kwargs):
+ super(Text, self).__init__(**kwargs)
+ self.content = content
+
+ def __repr__(self):
+ return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
+
+
+class Code(Node):
+
+ """defines a Python code block, either inline or module level.
+
+ e.g.::
+
+ inline:
+ <%
+ x = 12
+ %>
+
+ module level:
+ <%!
+ import logger
+ %>
+
+ """
+
+ def __init__(self, text, ismodule, **kwargs):
+ super(Code, self).__init__(**kwargs)
+ self.text = text
+ self.ismodule = ismodule
+ self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers
+
+ def __repr__(self):
+ return "Code(%r, %r, %r)" % (
+ self.text,
+ self.ismodule,
+ (self.lineno, self.pos),
+ )
+
+
+class Comment(Node):
+
+ """defines a comment line.
+
+ # this is a comment
+
+ """
+
+ def __init__(self, text, **kwargs):
+ super(Comment, self).__init__(**kwargs)
+ self.text = text
+
+ def __repr__(self):
+ return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
+
+
+class Expression(Node):
+
+ """defines an inline expression.
+
+ ${x+y}
+
+ """
+
+ def __init__(self, text, escapes, **kwargs):
+ super(Expression, self).__init__(**kwargs)
+ self.text = text
+ self.escapes = escapes
+ self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
+ self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ # TODO: make the "filter" shortcut list configurable at parse/gen time
+ return self.code.undeclared_identifiers.union(
+ self.escapes_code.undeclared_identifiers.difference(
+ set(filters.DEFAULT_ESCAPES.keys())
+ )
+ ).difference(self.code.declared_identifiers)
+
+ def __repr__(self):
+ return "Expression(%r, %r, %r)" % (
+ self.text,
+ self.escapes_code.args,
+ (self.lineno, self.pos),
+ )
+
+
+class _TagMeta(type):
+
+ """metaclass to allow Tag to produce a subclass according to
+ its keyword"""
+
+ _classmap = {}
+
+ def __init__(cls, clsname, bases, dict_):
+ if getattr(cls, "__keyword__", None) is not None:
+ cls._classmap[cls.__keyword__] = cls
+ super(_TagMeta, cls).__init__(clsname, bases, dict_)
+
+ def __call__(cls, keyword, attributes, **kwargs):
+ if ":" in keyword:
+ ns, defname = keyword.split(":")
+ return type.__call__(
+ CallNamespaceTag, ns, defname, attributes, **kwargs
+ )
+
+ try:
+ cls = _TagMeta._classmap[keyword]
+ except KeyError:
+ raise exceptions.CompileException(
+ "No such tag: '%s'" % keyword,
+ source=kwargs["source"],
+ lineno=kwargs["lineno"],
+ pos=kwargs["pos"],
+ filename=kwargs["filename"],
+ )
+ return type.__call__(cls, keyword, attributes, **kwargs)
+
+
+class Tag(compat.with_metaclass(_TagMeta, Node)):
+ """abstract base class for tags.
+
+ e.g.::
+
+ <%sometag/>
+
+ <%someothertag>
+ stuff
+ </%someothertag>
+
+ """
+
+ __keyword__ = None
+
+ def __init__(
+ self,
+ keyword,
+ attributes,
+ expressions,
+ nonexpressions,
+ required,
+ **kwargs
+ ):
+ r"""construct a new Tag instance.
+
+        this constructor is not called directly, and is only called
+ by subclasses.
+
+ :param keyword: the tag keyword
+
+ :param attributes: raw dictionary of attribute key/value pairs
+
+ :param expressions: a set of identifiers that are legal attributes,
+ which can also contain embedded expressions
+
+ :param nonexpressions: a set of identifiers that are legal
+ attributes, which cannot contain embedded expressions
+
+ :param \**kwargs:
+ other arguments passed to the Node superclass (lineno, pos)
+
+ """
+ super(Tag, self).__init__(**kwargs)
+ self.keyword = keyword
+ self.attributes = attributes
+ self._parse_attributes(expressions, nonexpressions)
+ missing = [r for r in required if r not in self.parsed_attributes]
+ if len(missing):
+ raise exceptions.CompileException(
+ "Missing attribute(s): %s"
+ % ",".join([repr(m) for m in missing]),
+ **self.exception_kwargs
+ )
+ self.parent = None
+ self.nodes = []
+
+ def is_root(self):
+ return self.parent is None
+
+ def get_children(self):
+ return self.nodes
+
+ def _parse_attributes(self, expressions, nonexpressions):
+ undeclared_identifiers = set()
+ self.parsed_attributes = {}
+ for key in self.attributes:
+ if key in expressions:
+ expr = []
+ for x in re.compile(r"(\${.+?})", re.S).split(
+ self.attributes[key]
+ ):
+ m = re.compile(r"^\${(.+?)}$", re.S).match(x)
+ if m:
+ code = ast.PythonCode(
+ m.group(1).rstrip(), **self.exception_kwargs
+ )
+ # we aren't discarding "declared_identifiers" here,
+ # which we do so that list comprehension-declared
+ # variables aren't counted. As yet can't find a
+ # condition that requires it here.
+ undeclared_identifiers = undeclared_identifiers.union(
+ code.undeclared_identifiers
+ )
+ expr.append("(%s)" % m.group(1))
+ else:
+ if x:
+ expr.append(repr(x))
+ self.parsed_attributes[key] = " + ".join(expr) or repr("")
+ elif key in nonexpressions:
+ if re.search(r"\${.+?}", self.attributes[key]):
+ raise exceptions.CompileException(
+                        "Attribute '%s' in tag '%s' does not allow embedded "
+ "expressions" % (key, self.keyword),
+ **self.exception_kwargs
+ )
+ self.parsed_attributes[key] = repr(self.attributes[key])
+ else:
+ raise exceptions.CompileException(
+ "Invalid attribute for tag '%s': '%s'"
+ % (self.keyword, key),
+ **self.exception_kwargs
+ )
+ self.expression_undeclared_identifiers = undeclared_identifiers
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ return self.expression_undeclared_identifiers
+
+ def __repr__(self):
+ return "%s(%r, %s, %r, %r)" % (
+ self.__class__.__name__,
+ self.keyword,
+ util.sorted_dict_repr(self.attributes),
+ (self.lineno, self.pos),
+ self.nodes,
+ )
+
+
+class IncludeTag(Tag):
+ __keyword__ = "include"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(IncludeTag, self).__init__(
+ keyword,
+ attributes,
+ ("file", "import", "args"),
+ (),
+ ("file",),
+ **kwargs
+ )
+ self.page_args = ast.PythonCode(
+ "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ identifiers = self.page_args.undeclared_identifiers.difference(
+ set(["__DUMMY"])
+ ).difference(self.page_args.declared_identifiers)
+ return identifiers.union(
+ super(IncludeTag, self).undeclared_identifiers()
+ )
+
+
+class NamespaceTag(Tag):
+ __keyword__ = "namespace"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(NamespaceTag, self).__init__(
+ keyword,
+ attributes,
+ ("file",),
+ ("name", "inheritable", "import", "module"),
+ (),
+ **kwargs
+ )
+
+ self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self))))
+ if "name" not in attributes and "import" not in attributes:
+ raise exceptions.CompileException(
+ "'name' and/or 'import' attributes are required "
+ "for <%namespace>",
+ **self.exception_kwargs
+ )
+ if "file" in attributes and "module" in attributes:
+ raise exceptions.CompileException(
+ "<%namespace> may only have one of 'file' or 'module'",
+ **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return []
+
+
+class TextTag(Tag):
+ __keyword__ = "text"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(TextTag, self).__init__(
+            keyword, attributes, (), ("filter",), (), **kwargs
+ )
+ self.filter_args = ast.ArgumentList(
+ attributes.get("filter", ""), **self.exception_kwargs
+ )
+
+ def undeclared_identifiers(self):
+ return self.filter_args.undeclared_identifiers.difference(
+ filters.DEFAULT_ESCAPES.keys()
+ ).union(self.expression_undeclared_identifiers)
+
+
+class DefTag(Tag):
+ __keyword__ = "def"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = ["buffered", "cached"] + [
+ c for c in attributes if c.startswith("cache_")
+ ]
+
+ super(DefTag, self).__init__(
+ keyword,
+ attributes,
+ expressions,
+ ("name", "filter", "decorator"),
+ ("name",),
+ **kwargs
+ )
+ name = attributes["name"]
+ if re.match(r"^[\w_]+$", name):
+ raise exceptions.CompileException(
+ "Missing parenthesis in %def", **self.exception_kwargs
+ )
+ self.function_decl = ast.FunctionDecl(
+ "def " + name + ":pass", **self.exception_kwargs
+ )
+ self.name = self.function_decl.funcname
+ self.decorator = attributes.get("decorator", "")
+ self.filter_args = ast.ArgumentList(
+ attributes.get("filter", ""), **self.exception_kwargs
+ )
+
+ is_anonymous = False
+ is_block = False
+
+ @property
+ def funcname(self):
+ return self.function_decl.funcname
+
+ def get_argument_expressions(self, **kw):
+ return self.function_decl.get_argument_expressions(**kw)
+
+ def declared_identifiers(self):
+ return self.function_decl.allargnames
+
+ def undeclared_identifiers(self):
+ res = []
+ for c in self.function_decl.defaults:
+ res += list(
+ ast.PythonCode(
+ c, **self.exception_kwargs
+ ).undeclared_identifiers
+ )
+ return (
+ set(res)
+ .union(
+ self.filter_args.undeclared_identifiers.difference(
+ filters.DEFAULT_ESCAPES.keys()
+ )
+ )
+ .union(self.expression_undeclared_identifiers)
+ .difference(self.function_decl.allargnames)
+ )
+
+
+class BlockTag(Tag):
+ __keyword__ = "block"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = ["buffered", "cached", "args"] + [
+ c for c in attributes if c.startswith("cache_")
+ ]
+
+ super(BlockTag, self).__init__(
+ keyword,
+ attributes,
+ expressions,
+ ("name", "filter", "decorator"),
+ (),
+ **kwargs
+ )
+ name = attributes.get("name")
+ if name and not re.match(r"^[\w_]+$", name):
+ raise exceptions.CompileException(
+ "%block may not specify an argument signature",
+ **self.exception_kwargs
+ )
+ if not name and attributes.get("args", None):
+ raise exceptions.CompileException(
+ "Only named %blocks may specify args", **self.exception_kwargs
+ )
+ self.body_decl = ast.FunctionArgs(
+ attributes.get("args", ""), **self.exception_kwargs
+ )
+
+ self.name = name
+ self.decorator = attributes.get("decorator", "")
+ self.filter_args = ast.ArgumentList(
+ attributes.get("filter", ""), **self.exception_kwargs
+ )
+
+ is_block = True
+
+ @property
+ def is_anonymous(self):
+ return self.name is None
+
+ @property
+ def funcname(self):
+ return self.name or "__M_anon_%d" % (self.lineno,)
+
+ def get_argument_expressions(self, **kw):
+ return self.body_decl.get_argument_expressions(**kw)
+
+ def declared_identifiers(self):
+ return self.body_decl.allargnames
+
+ def undeclared_identifiers(self):
+ return (
+ self.filter_args.undeclared_identifiers.difference(
+ filters.DEFAULT_ESCAPES.keys()
+ )
+ ).union(self.expression_undeclared_identifiers)
+
+
+class CallTag(Tag):
+ __keyword__ = "call"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(CallTag, self).__init__(
+            keyword, attributes, ("args",), ("expr",), ("expr",), **kwargs
+ )
+ self.expression = attributes["expr"]
+ self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
+ self.body_decl = ast.FunctionArgs(
+ attributes.get("args", ""), **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers.union(self.body_decl.allargnames)
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers.difference(
+ self.code.declared_identifiers
+ )
+
+
+class CallNamespaceTag(Tag):
+ def __init__(self, namespace, defname, attributes, **kwargs):
+ super(CallNamespaceTag, self).__init__(
+ namespace + ":" + defname,
+ attributes,
+ tuple(attributes.keys()) + ("args",),
+ (),
+ (),
+ **kwargs
+ )
+
+ self.expression = "%s.%s(%s)" % (
+ namespace,
+ defname,
+ ",".join(
+ [
+ "%s=%s" % (k, v)
+ for k, v in self.parsed_attributes.items()
+ if k != "args"
+ ]
+ ),
+ )
+ self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
+ self.body_decl = ast.FunctionArgs(
+ attributes.get("args", ""), **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers.union(self.body_decl.allargnames)
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers.difference(
+ self.code.declared_identifiers
+ )
+
+
+class InheritTag(Tag):
+ __keyword__ = "inherit"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(InheritTag, self).__init__(
+ keyword, attributes, ("file",), (), ("file",), **kwargs
+ )
+
+
+class PageTag(Tag):
+ __keyword__ = "page"
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = [
+ "cached",
+ "args",
+ "expression_filter",
+ "enable_loop",
+ ] + [c for c in attributes if c.startswith("cache_")]
+
+ super(PageTag, self).__init__(
+ keyword, attributes, expressions, (), (), **kwargs
+ )
+ self.body_decl = ast.FunctionArgs(
+ attributes.get("args", ""), **self.exception_kwargs
+ )
+ self.filter_args = ast.ArgumentList(
+ attributes.get("expression_filter", ""), **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return self.body_decl.allargnames
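A sketch of walking a parse tree with accept_visitor(), which dispatches to methods named "visit" plus the node class name; the template text is made up.

    from mako.lexer import Lexer

    class ExpressionCollector(object):
        def __init__(self):
            self.expressions = []

        def visitExpression(self, node):
            self.expressions.append(node.text)

    tree = Lexer("sum is ${a + b}, diff is ${a - b}").parse()
    collector = ExpressionCollector()
    for node in tree.get_children():
        node.accept_visitor(collector)
    print(collector.expressions)  # ['a + b', 'a - b']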
diff --git a/third_party/python/Mako/mako/pygen.py b/third_party/python/Mako/mako/pygen.py
new file mode 100644
index 0000000000..947721f1a5
--- /dev/null
+++ b/third_party/python/Mako/mako/pygen.py
@@ -0,0 +1,305 @@
+# mako/pygen.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""utilities for generating and formatting literal Python code."""
+
+import re
+
+from mako import exceptions
+
+
+class PythonPrinter(object):
+ def __init__(self, stream):
+ # indentation counter
+ self.indent = 0
+
+ # a stack storing information about why we incremented
+ # the indentation counter, to help us determine if we
+ # should decrement it
+ self.indent_detail = []
+
+ # the string of whitespace multiplied by the indent
+ # counter to produce a line
+ self.indentstring = " "
+
+ # the stream we are writing to
+ self.stream = stream
+
+ # current line number
+ self.lineno = 1
+
+ # a list of lines that represents a buffered "block" of code,
+ # which can be later printed relative to an indent level
+ self.line_buffer = []
+
+ self.in_indent_lines = False
+
+ self._reset_multi_line_flags()
+
+ # mapping of generated python lines to template
+ # source lines
+ self.source_map = {}
+
+ def _update_lineno(self, num):
+ self.lineno += num
+
+ def start_source(self, lineno):
+ if self.lineno not in self.source_map:
+ self.source_map[self.lineno] = lineno
+
+ def write_blanks(self, num):
+ self.stream.write("\n" * num)
+ self._update_lineno(num)
+
+ def write_indented_block(self, block, starting_lineno=None):
+ """print a line or lines of python which already contain indentation.
+
+ The indentation of the total block of lines will be adjusted to that of
+ the current indent level."""
+ self.in_indent_lines = False
+ for i, l in enumerate(re.split(r"\r?\n", block)):
+ self.line_buffer.append(l)
+ if starting_lineno is not None:
+ self.start_source(starting_lineno + i)
+ self._update_lineno(1)
+
+ def writelines(self, *lines):
+ """print a series of lines of python."""
+ for line in lines:
+ self.writeline(line)
+
+ def writeline(self, line):
+ """print a line of python, indenting it according to the current
+ indent level.
+
+ this also adjusts the indentation counter according to the
+ content of the line.
+
+ """
+
+ if not self.in_indent_lines:
+ self._flush_adjusted_lines()
+ self.in_indent_lines = True
+
+ if (
+ line is None
+ or re.match(r"^\s*#", line)
+ or re.match(r"^\s*$", line)
+ ):
+ hastext = False
+ else:
+ hastext = True
+
+ is_comment = line and len(line) and line[0] == "#"
+
+ # see if this line should decrease the indentation level
+ if not is_comment and (not hastext or self._is_unindentor(line)):
+
+ if self.indent > 0:
+ self.indent -= 1
+ # if the indent_detail stack is empty, the user
+ # probably put extra closures - the resulting
+                # module won't compile.
+ if len(self.indent_detail) == 0:
+ raise exceptions.SyntaxException(
+ "Too many whitespace closures"
+ )
+ self.indent_detail.pop()
+
+ if line is None:
+ return
+
+ # write the line
+ self.stream.write(self._indent_line(line) + "\n")
+ self._update_lineno(len(line.split("\n")))
+
+ # see if this line should increase the indentation level.
+        # note that a line can both decrease (before printing) and
+ # then increase (after printing) the indentation level.
+
+ if re.search(r":[ \t]*(?:#.*)?$", line):
+ # increment indentation count, and also
+ # keep track of what the keyword was that indented us,
+ # if it is a python compound statement keyword
+ # where we might have to look for an "unindent" keyword
+ match = re.match(r"^\s*(if|try|elif|while|for|with)", line)
+ if match:
+                # it's a "compound" keyword, so we will check for "unindentors"
+ indentor = match.group(1)
+ self.indent += 1
+ self.indent_detail.append(indentor)
+ else:
+ indentor = None
+                # it's not a "compound" keyword, but let's also
+                # test for valid Python keywords that might be indenting us,
+                # else assume it's a non-indenting line
+ m2 = re.match(
+ r"^\s*(def|class|else|elif|except|finally)", line
+ )
+ if m2:
+ self.indent += 1
+ self.indent_detail.append(indentor)
+
+ def close(self):
+ """close this printer, flushing any remaining lines."""
+ self._flush_adjusted_lines()
+
+ def _is_unindentor(self, line):
+ """return true if the given line is an 'unindentor',
+ relative to the last 'indent' event received.
+
+ """
+
+ # no indentation detail has been pushed on; return False
+ if len(self.indent_detail) == 0:
+ return False
+
+ indentor = self.indent_detail[-1]
+
+ # the last indent keyword we grabbed is not a
+ # compound statement keyword; return False
+ if indentor is None:
+ return False
+
+        # if the current line doesn't have one of the "unindentor" keywords,
+ # return False
+ match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
+ if not match:
+ return False
+
+ # whitespace matches up, we have a compound indentor,
+ # and this line has an unindentor, this
+ # is probably good enough
+ return True
+
+        # should we decide that it's not good enough, here's
+ # more stuff to check.
+ # keyword = match.group(1)
+
+ # match the original indent keyword
+ # for crit in [
+ # (r'if|elif', r'else|elif'),
+ # (r'try', r'except|finally|else'),
+ # (r'while|for', r'else'),
+ # ]:
+ # if re.match(crit[0], indentor) and re.match(crit[1], keyword):
+ # return True
+
+ # return False
+
+ def _indent_line(self, line, stripspace=""):
+ """indent the given line according to the current indent level.
+
+ stripspace is a string of space that will be truncated from the
+ start of the line before indenting."""
+
+ return re.sub(
+ r"^%s" % stripspace, self.indentstring * self.indent, line
+ )
+
+ def _reset_multi_line_flags(self):
+ """reset the flags which would indicate we are in a backslashed
+ or triple-quoted section."""
+
+ self.backslashed, self.triplequoted = False, False
+
+ def _in_multi_line(self, line):
+ """return true if the given line is part of a multi-line block,
+ via backslash or triple-quote."""
+
+ # we are only looking for explicitly joined lines here, not
+ # implicit ones (i.e. brackets, braces etc.). this is just to
+ # guard against the possibility of modifying the space inside of
+ # a literal multiline string with unfortunately placed
+ # whitespace
+
+ current_state = self.backslashed or self.triplequoted
+
+ if re.search(r"\\$", line):
+ self.backslashed = True
+ else:
+ self.backslashed = False
+
+ triples = len(re.findall(r"\"\"\"|\'\'\'", line))
+ if triples == 1 or triples % 2 != 0:
+ self.triplequoted = not self.triplequoted
+
+ return current_state
+
+ def _flush_adjusted_lines(self):
+ stripspace = None
+ self._reset_multi_line_flags()
+
+ for entry in self.line_buffer:
+ if self._in_multi_line(entry):
+ self.stream.write(entry + "\n")
+ else:
+ entry = entry.expandtabs()
+ if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
+ stripspace = re.match(r"^([ \t]*)", entry).group(1)
+ self.stream.write(self._indent_line(entry, stripspace) + "\n")
+
+ self.line_buffer = []
+ self._reset_multi_line_flags()
+
+
+def adjust_whitespace(text):
+ """remove the left-whitespace margin of a block of Python code."""
+
+ state = [False, False]
+ (backslashed, triplequoted) = (0, 1)
+
+ def in_multi_line(line):
+ start_state = state[backslashed] or state[triplequoted]
+
+ if re.search(r"\\$", line):
+ state[backslashed] = True
+ else:
+ state[backslashed] = False
+
+ def match(reg, t):
+ m = re.match(reg, t)
+ if m:
+ return m, t[len(m.group(0)) :]
+ else:
+ return None, t
+
+ while line:
+ if state[triplequoted]:
+ m, line = match(r"%s" % state[triplequoted], line)
+ if m:
+ state[triplequoted] = False
+ else:
+ m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
+ else:
+ m, line = match(r"#", line)
+ if m:
+ return start_state
+
+ m, line = match(r"\"\"\"|\'\'\'", line)
+ if m:
+ state[triplequoted] = m.group(0)
+ continue
+
+ m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
+
+ return start_state
+
+ def _indent_line(line, stripspace=""):
+ return re.sub(r"^%s" % stripspace, "", line)
+
+ lines = []
+ stripspace = None
+
+ for line in re.split(r"\r?\n", text):
+ if in_multi_line(line):
+ lines.append(line)
+ else:
+ line = line.expandtabs()
+ if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
+ stripspace = re.match(r"^([ \t]*)", line).group(1)
+ lines.append(_indent_line(line, stripspace))
+ return "\n".join(lines)
diff --git a/third_party/python/Mako/mako/pyparser.py b/third_party/python/Mako/mako/pyparser.py
new file mode 100644
index 0000000000..b16672d60c
--- /dev/null
+++ b/third_party/python/Mako/mako/pyparser.py
@@ -0,0 +1,242 @@
+# mako/pyparser.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Handles parsing of Python code.
+
+Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
+module is used.
+"""
+
+import operator
+
+import _ast
+
+from mako import _ast_util
+from mako import compat
+from mako import exceptions
+from mako import util
+from mako.compat import arg_stringname
+
+if compat.py3k:
+ # words that cannot be assigned to (notably
+ # smaller than the total keys in __builtins__)
+ reserved = set(["True", "False", "None", "print"])
+
+ # the "id" attribute on a function node
+ arg_id = operator.attrgetter("arg")
+else:
+ # words that cannot be assigned to (notably
+ # smaller than the total keys in __builtins__)
+ reserved = set(["True", "False", "None"])
+
+ # the "id" attribute on a function node
+ arg_id = operator.attrgetter("id")
+
+util.restore__ast(_ast)
+
+
+def parse(code, mode="exec", **exception_kwargs):
+ """Parse an expression into AST"""
+
+ try:
+ return _ast_util.parse(code, "<unknown>", mode)
+ except Exception:
+ raise exceptions.SyntaxException(
+ "(%s) %s (%r)"
+ % (
+ compat.exception_as().__class__.__name__,
+ compat.exception_as(),
+ code[0:50],
+ ),
+ **exception_kwargs
+ )
+
+
+class FindIdentifiers(_ast_util.NodeVisitor):
+ def __init__(self, listener, **exception_kwargs):
+ self.in_function = False
+ self.in_assign_targets = False
+ self.local_ident_stack = set()
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+
+ def _add_declared(self, name):
+ if not self.in_function:
+ self.listener.declared_identifiers.add(name)
+ else:
+ self.local_ident_stack.add(name)
+
+ def visit_ClassDef(self, node):
+ self._add_declared(node.name)
+
+ def visit_Assign(self, node):
+
+ # flip around the visiting of Assign so the expression gets
+ # evaluated first, in the case of a clause like "x=x+5" (x
+ # is undeclared)
+
+ self.visit(node.value)
+ in_a = self.in_assign_targets
+ self.in_assign_targets = True
+ for n in node.targets:
+ self.visit(n)
+ self.in_assign_targets = in_a
+
+ if compat.py3k:
+
+ # ExceptHandler is in Python 2, but this block only works in
+ # Python 3 (and is required there)
+
+ def visit_ExceptHandler(self, node):
+ if node.name is not None:
+ self._add_declared(node.name)
+ if node.type is not None:
+ self.visit(node.type)
+ for statement in node.body:
+ self.visit(statement)
+
+ def visit_Lambda(self, node, *args):
+ self._visit_function(node, True)
+
+ def visit_FunctionDef(self, node):
+ self._add_declared(node.name)
+ self._visit_function(node, False)
+
+ def _expand_tuples(self, args):
+ for arg in args:
+ if isinstance(arg, _ast.Tuple):
+ for n in arg.elts:
+ yield n
+ else:
+ yield arg
+
+ def _visit_function(self, node, islambda):
+
+        # push function state onto stack. don't log any more
+ # identifiers as "declared" until outside of the function,
+ # but keep logging identifiers as "undeclared". track
+        # argument names in each function header so they aren't
+ # counted as "undeclared"
+
+ inf = self.in_function
+ self.in_function = True
+
+ local_ident_stack = self.local_ident_stack
+ self.local_ident_stack = local_ident_stack.union(
+ [arg_id(arg) for arg in self._expand_tuples(node.args.args)]
+ )
+ if islambda:
+ self.visit(node.body)
+ else:
+ for n in node.body:
+ self.visit(n)
+ self.in_function = inf
+ self.local_ident_stack = local_ident_stack
+
+ def visit_For(self, node):
+
+ # flip around visit
+
+ self.visit(node.iter)
+ self.visit(node.target)
+ for statement in node.body:
+ self.visit(statement)
+ for statement in node.orelse:
+ self.visit(statement)
+
+ def visit_Name(self, node):
+ if isinstance(node.ctx, _ast.Store):
+            # this is equivalent to visit_AssName in
+ # compiler
+ self._add_declared(node.id)
+ elif (
+ node.id not in reserved
+ and node.id not in self.listener.declared_identifiers
+ and node.id not in self.local_ident_stack
+ ):
+ self.listener.undeclared_identifiers.add(node.id)
+
+ def visit_Import(self, node):
+ for name in node.names:
+ if name.asname is not None:
+ self._add_declared(name.asname)
+ else:
+ self._add_declared(name.name.split(".")[0])
+
+ def visit_ImportFrom(self, node):
+ for name in node.names:
+ if name.asname is not None:
+ self._add_declared(name.asname)
+ else:
+ if name.name == "*":
+ raise exceptions.CompileException(
+ "'import *' is not supported, since all identifier "
+ "names must be explicitly declared. Please use the "
+ "form 'from <modulename> import <name1>, <name2>, "
+ "...' instead.",
+ **self.exception_kwargs
+ )
+ self._add_declared(name.name)
+
+
+class FindTuple(_ast_util.NodeVisitor):
+ def __init__(self, listener, code_factory, **exception_kwargs):
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+ self.code_factory = code_factory
+
+ def visit_Tuple(self, node):
+ for n in node.elts:
+ p = self.code_factory(n, **self.exception_kwargs)
+ self.listener.codeargs.append(p)
+ self.listener.args.append(ExpressionGenerator(n).value())
+ ldi = self.listener.declared_identifiers
+ self.listener.declared_identifiers = ldi.union(
+ p.declared_identifiers
+ )
+ lui = self.listener.undeclared_identifiers
+ self.listener.undeclared_identifiers = lui.union(
+ p.undeclared_identifiers
+ )
+
+
+class ParseFunc(_ast_util.NodeVisitor):
+ def __init__(self, listener, **exception_kwargs):
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+
+ def visit_FunctionDef(self, node):
+ self.listener.funcname = node.name
+
+ argnames = [arg_id(arg) for arg in node.args.args]
+ if node.args.vararg:
+ argnames.append(arg_stringname(node.args.vararg))
+
+ if compat.py2k:
+ # kw-only args don't exist in Python 2
+ kwargnames = []
+ else:
+ kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
+ if node.args.kwarg:
+ kwargnames.append(arg_stringname(node.args.kwarg))
+ self.listener.argnames = argnames
+ self.listener.defaults = node.args.defaults # ast
+ self.listener.kwargnames = kwargnames
+ if compat.py2k:
+ self.listener.kwdefaults = []
+ else:
+ self.listener.kwdefaults = node.args.kw_defaults
+ self.listener.varargs = node.args.vararg
+ self.listener.kwargs = node.args.kwarg
+
+
+class ExpressionGenerator(object):
+ def __init__(self, astnode):
+ self.generator = _ast_util.SourceGenerator(" " * 4)
+ self.generator.visit(astnode)
+
+ def value(self):
+ return "".join(self.generator.result)
diff --git a/third_party/python/Mako/mako/runtime.py b/third_party/python/Mako/mako/runtime.py
new file mode 100644
index 0000000000..465908e6d8
--- /dev/null
+++ b/third_party/python/Mako/mako/runtime.py
@@ -0,0 +1,970 @@
+# mako/runtime.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides runtime services for templates, including Context,
+Namespace, and various helper functions."""
+
+import functools
+import sys
+
+from mako import compat
+from mako import exceptions
+from mako import util
+from mako.compat import compat_builtins
+
+
+class Context(object):
+
+ """Provides runtime namespace, output buffer, and various
+ callstacks for templates.
+
+ See :ref:`runtime_toplevel` for detail on the usage of
+ :class:`.Context`.
+
+ """
+
+ def __init__(self, buffer, **data):
+ self._buffer_stack = [buffer]
+
+ self._data = data
+
+ self._kwargs = data.copy()
+ self._with_template = None
+ self._outputting_as_unicode = None
+ self.namespaces = {}
+
+ # "capture" function which proxies to the
+ # generic "capture" function
+ self._data["capture"] = functools.partial(capture, self)
+
+ # "caller" stack used by def calls with content
+ self.caller_stack = self._data["caller"] = CallerStack()
+
+ def _set_with_template(self, t):
+ self._with_template = t
+ illegal_names = t.reserved_names.intersection(self._data)
+ if illegal_names:
+ raise exceptions.NameConflictError(
+ "Reserved words passed to render(): %s"
+ % ", ".join(illegal_names)
+ )
+
+ @property
+ def lookup(self):
+ """Return the :class:`.TemplateLookup` associated
+ with this :class:`.Context`.
+
+ """
+ return self._with_template.lookup
+
+ @property
+ def kwargs(self):
+ """Return the dictionary of top level keyword arguments associated
+ with this :class:`.Context`.
+
+ This dictionary only includes the top-level arguments passed to
+ :meth:`.Template.render`. It does not include names produced within
+ the template execution such as local variable names or special names
+ such as ``self``, ``next``, etc.
+
+ The purpose of this dictionary is primarily for the case that
+ a :class:`.Template` accepts arguments via its ``<%page>`` tag,
+ which are normally expected to be passed via :meth:`.Template.render`,
+ except the template is being called in an inheritance context,
+ using the ``body()`` method. :attr:`.Context.kwargs` can then be
+ used to propagate these arguments to the inheriting template::
+
+ ${next.body(**context.kwargs)}
+
+ """
+ return self._kwargs.copy()
+
+ def push_caller(self, caller):
+ """Push a ``caller`` callable onto the callstack for
+ this :class:`.Context`."""
+
+ self.caller_stack.append(caller)
+
+ def pop_caller(self):
+ """Pop a ``caller`` callable onto the callstack for this
+ :class:`.Context`."""
+
+ del self.caller_stack[-1]
+
+ def keys(self):
+ """Return a list of all names established in this :class:`.Context`."""
+
+ return list(self._data.keys())
+
+ def __getitem__(self, key):
+ if key in self._data:
+ return self._data[key]
+ else:
+ return compat_builtins.__dict__[key]
+
+ def _push_writer(self):
+ """push a capturing buffer onto this Context and return
+ the new writer function."""
+
+ buf = util.FastEncodingBuffer()
+ self._buffer_stack.append(buf)
+ return buf.write
+
+ def _pop_buffer_and_writer(self):
+ """pop the most recent capturing buffer from this Context
+ and return the current writer after the pop.
+
+ """
+
+ buf = self._buffer_stack.pop()
+ return buf, self._buffer_stack[-1].write
+
+ def _push_buffer(self):
+ """push a capturing buffer onto this Context."""
+
+ self._push_writer()
+
+ def _pop_buffer(self):
+ """pop the most recent capturing buffer from this Context."""
+
+ return self._buffer_stack.pop()
+
+ def get(self, key, default=None):
+ """Return a value from this :class:`.Context`."""
+
+ return self._data.get(key, compat_builtins.__dict__.get(key, default))
+
+ def write(self, string):
+ """Write a string to this :class:`.Context` object's
+ underlying output buffer."""
+
+ self._buffer_stack[-1].write(string)
+
+ def writer(self):
+ """Return the current writer function."""
+
+ return self._buffer_stack[-1].write
+
+ def _copy(self):
+ c = Context.__new__(Context)
+ c._buffer_stack = self._buffer_stack
+ c._data = self._data.copy()
+ c._kwargs = self._kwargs
+ c._with_template = self._with_template
+ c._outputting_as_unicode = self._outputting_as_unicode
+ c.namespaces = self.namespaces
+ c.caller_stack = self.caller_stack
+ return c
+
+ def _locals(self, d):
+ """Create a new :class:`.Context` with a copy of this
+ :class:`.Context`'s current state,
+ updated with the given dictionary.
+
+ The :attr:`.Context.kwargs` collection remains
+ unaffected.
+
+
+ """
+
+ if not d:
+ return self
+ c = self._copy()
+ c._data.update(d)
+ return c
+
+ def _clean_inheritance_tokens(self):
+ """create a new copy of this :class:`.Context`. with
+ tokens related to inheritance state removed."""
+
+ c = self._copy()
+ x = c._data
+ x.pop("self", None)
+ x.pop("parent", None)
+ x.pop("next", None)
+ return c
+
+
+class CallerStack(list):
+ def __init__(self):
+ self.nextcaller = None
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __bool__(self):
+ return len(self) and self._get_caller() and True or False
+
+ def _get_caller(self):
+ # this method can be removed once
+ # codegen MAGIC_NUMBER moves past 7
+ return self[-1]
+
+ def __getattr__(self, key):
+ return getattr(self._get_caller(), key)
+
+ def _push_frame(self):
+ frame = self.nextcaller or None
+ self.append(frame)
+ self.nextcaller = None
+ return frame
+
+ def _pop_frame(self):
+ self.nextcaller = self.pop()
+
+
+class Undefined(object):
+
+ """Represents an undefined value in a template.
+
+ All template modules have a constant value
+ ``UNDEFINED`` present which is an instance of this
+ object.
+
+ """
+
+ def __str__(self):
+ raise NameError("Undefined")
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __bool__(self):
+ return False
+
+
+UNDEFINED = Undefined()
+STOP_RENDERING = ""
+
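+
+# Editor's note: the function below is an illustrative sketch added in
+# review; it is not part of upstream Mako.  It shows the two behaviours of
+# UNDEFINED that templates rely on: it is falsy, and rendering it via
+# str() raises NameError.
+def _editor_demo_undefined():
+    assert not UNDEFINED
+    try:
+        str(UNDEFINED)
+    except NameError:
+        return "str(UNDEFINED) raised NameError"
+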
+
+class LoopStack(object):
+
+ """a stack for LoopContexts that implements the context manager protocol
+ to automatically pop off the top of the stack on context exit
+ """
+
+ def __init__(self):
+ self.stack = []
+
+ def _enter(self, iterable):
+ self._push(iterable)
+ return self._top
+
+ def _exit(self):
+ self._pop()
+ return self._top
+
+ @property
+ def _top(self):
+ if self.stack:
+ return self.stack[-1]
+ else:
+ return self
+
+ def _pop(self):
+ return self.stack.pop()
+
+ def _push(self, iterable):
+ new = LoopContext(iterable)
+ if self.stack:
+ new.parent = self.stack[-1]
+ return self.stack.append(new)
+
+ def __getattr__(self, key):
+ raise exceptions.RuntimeException("No loop context is established")
+
+ def __iter__(self):
+ return iter(self._top)
+
+
+class LoopContext(object):
+
+ """A magic loop variable.
+ Automatically accessible in any ``% for`` block.
+
+ See the section :ref:`loop_context` for usage
+ notes.
+
+ :attr:`parent` -> :class:`.LoopContext` or ``None``
+ The parent loop, if one exists.
+ :attr:`index` -> `int`
+ The 0-based iteration count.
+ :attr:`reverse_index` -> `int`
+ The number of iterations remaining.
+ :attr:`first` -> `bool`
+ ``True`` on the first iteration, ``False`` otherwise.
+ :attr:`last` -> `bool`
+ ``True`` on the last iteration, ``False`` otherwise.
+ :attr:`even` -> `bool`
+ ``True`` when ``index`` is even.
+ :attr:`odd` -> `bool`
+ ``True`` when ``index`` is odd.
+ """
+
+ def __init__(self, iterable):
+ self._iterable = iterable
+ self.index = 0
+ self.parent = None
+
+ def __iter__(self):
+ for i in self._iterable:
+ yield i
+ self.index += 1
+
+ @util.memoized_instancemethod
+ def __len__(self):
+ return len(self._iterable)
+
+ @property
+ def reverse_index(self):
+ return len(self) - self.index - 1
+
+ @property
+ def first(self):
+ return self.index == 0
+
+ @property
+ def last(self):
+ return self.index == len(self) - 1
+
+ @property
+ def even(self):
+ return not self.odd
+
+ @property
+ def odd(self):
+ return bool(self.index % 2)
+
+ def cycle(self, *values):
+ """Cycle through values as the loop progresses.
+ """
+ if not values:
+ raise ValueError("You must provide values to cycle through")
+ return values[self.index % len(values)]
+
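+
+# Editor's note: the function below is an illustrative sketch added in
+# review; it is not part of upstream Mako.  It exercises LoopContext
+# directly, outside of a rendered template, to show the index/first/last
+# attributes and cycle() behaviour documented above.
+def _editor_demo_loop_context():
+    loop = LoopContext(["a", "b", "c"])
+    rows = []
+    for item in loop:
+        # index is 0-based; cycle() alternates through the given values
+        rows.append(
+            (loop.index, item, loop.first, loop.last,
+             loop.cycle("odd", "even"))
+        )
+    # rows ends up as:
+    #   [(0, 'a', True, False, 'odd'),
+    #    (1, 'b', False, False, 'even'),
+    #    (2, 'c', False, True, 'odd')]
+    return rows
+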
+
+class _NSAttr(object):
+ def __init__(self, parent):
+ self.__parent = parent
+
+ def __getattr__(self, key):
+ ns = self.__parent
+ while ns:
+ if hasattr(ns.module, key):
+ return getattr(ns.module, key)
+ else:
+ ns = ns.inherits
+ raise AttributeError(key)
+
+
+class Namespace(object):
+
+ """Provides access to collections of rendering methods, which
+ can be local, from other templates, or from imported modules.
+
+ To access a particular rendering method referenced by a
+ :class:`.Namespace`, use plain attribute access:
+
+ .. sourcecode:: mako
+
+ ${some_namespace.foo(x, y, z)}
+
+ :class:`.Namespace` also contains several built-in attributes
+ described here.
+
+ """
+
+ def __init__(
+ self,
+ name,
+ context,
+ callables=None,
+ inherits=None,
+ populate_self=True,
+ calling_uri=None,
+ ):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ callables = ()
+
+ module = None
+ """The Python module referenced by this :class:`.Namespace`.
+
+ If the namespace references a :class:`.Template`, then
+ this module is the equivalent of ``template.module``,
+ i.e. the generated module for the template.
+
+ """
+
+ template = None
+ """The :class:`.Template` object referenced by this
+ :class:`.Namespace`, if any.
+
+ """
+
+ context = None
+ """The :class:`.Context` object for this :class:`.Namespace`.
+
+ Namespaces are often created with copies of contexts that
+ contain slightly different data, particularly in inheritance
+ scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one
+ can traverse an entire chain of templates that inherit from
+    one another.
+
+ """
+
+ filename = None
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+
+ If this is a pure module-based
+ :class:`.Namespace`, this evaluates to ``module.__file__``. If a
+ template-based namespace, it evaluates to the original
+ template file location.
+
+ """
+
+ uri = None
+ """The URI for this :class:`.Namespace`'s template.
+
+ I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
+
+ This is the equivalent of :attr:`.Template.uri`.
+
+ """
+
+ _templateuri = None
+
+ @util.memoized_property
+ def attr(self):
+ """Access module level attributes by name.
+
+ This accessor allows templates to supply "scalar"
+ attributes which are particularly handy in inheritance
+ relationships.
+
+ .. seealso::
+
+ :ref:`inheritance_attr`
+
+ :ref:`namespace_attr_for_includes`
+
+ """
+ return _NSAttr(self)
+
+ def get_namespace(self, uri):
+ """Return a :class:`.Namespace` corresponding to the given ``uri``.
+
+ If the given ``uri`` is a relative URI (i.e. it does not
+ contain a leading slash ``/``), the ``uri`` is adjusted to
+ be relative to the ``uri`` of the namespace itself. This
+ method is therefore mostly useful off of the built-in
+ ``local`` namespace, described in :ref:`namespace_local`.
+
+        In most cases, a template wouldn't need this function, and
+ should instead use the ``<%namespace>`` tag to load
+ namespaces. However, since all ``<%namespace>`` tags are
+ evaluated before the body of a template ever runs,
+ this method can be used to locate namespaces using
+ expressions that were generated within the body code of
+ the template, or to conditionally use a particular
+ namespace.
+
+ """
+ key = (self, uri)
+ if key in self.context.namespaces:
+ return self.context.namespaces[key]
+ else:
+ ns = TemplateNamespace(
+ uri,
+ self.context._copy(),
+ templateuri=uri,
+ calling_uri=self._templateuri,
+ )
+ self.context.namespaces[key] = ns
+ return ns
+
+ def get_template(self, uri):
+ """Return a :class:`.Template` from the given ``uri``.
+
+ The ``uri`` resolution is relative to the ``uri`` of this
+ :class:`.Namespace` object's :class:`.Template`.
+
+ """
+ return _lookup_template(self.context, uri, self._templateuri)
+
+ def get_cached(self, key, **kwargs):
+ """Return a value from the :class:`.Cache` referenced by this
+ :class:`.Namespace` object's :class:`.Template`.
+
+ The advantage to this method versus direct access to the
+ :class:`.Cache` is that the configuration parameters
+ declared in ``<%page>`` take effect here, thereby calling
+ up the same configured backend as that configured
+ by ``<%page>``.
+
+ """
+
+ return self.cache.get(key, **kwargs)
+
+ @property
+ def cache(self):
+ """Return the :class:`.Cache` object referenced
+ by this :class:`.Namespace` object's
+ :class:`.Template`.
+
+ """
+ return self.template.cache
+
+ def include_file(self, uri, **kwargs):
+ """Include a file at the given ``uri``."""
+
+ _include_file(self.context, uri, self._templateuri, **kwargs)
+
+ def _populate(self, d, l):
+ for ident in l:
+ if ident == "*":
+ for (k, v) in self._get_star():
+ d[k] = v
+ else:
+ d[ident] = getattr(self, ident)
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" % (self.name, key)
+ )
+ setattr(self, key, val)
+ return val
+
+
+class TemplateNamespace(Namespace):
+
+ """A :class:`.Namespace` specific to a :class:`.Template` instance."""
+
+ def __init__(
+ self,
+ name,
+ context,
+ template=None,
+ templateuri=None,
+ callables=None,
+ inherits=None,
+ populate_self=True,
+ calling_uri=None,
+ ):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ if templateuri is not None:
+ self.template = _lookup_template(context, templateuri, calling_uri)
+ self._templateuri = self.template.module._template_uri
+ elif template is not None:
+ self.template = template
+ self._templateuri = template.module._template_uri
+ else:
+ raise TypeError("'template' argument is required.")
+
+ if populate_self:
+ lclcallable, lclcontext = _populate_self_namespace(
+ context, self.template, self_ns=self
+ )
+
+ @property
+ def module(self):
+ """The Python module referenced by this :class:`.Namespace`.
+
+ If the namespace references a :class:`.Template`, then
+ this module is the equivalent of ``template.module``,
+ i.e. the generated module for the template.
+
+ """
+ return self.template.module
+
+ @property
+ def filename(self):
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+ """
+ return self.template.filename
+
+ @property
+ def uri(self):
+ """The URI for this :class:`.Namespace`'s template.
+
+ I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
+
+ This is the equivalent of :attr:`.Template.uri`.
+
+ """
+ return self.template.uri
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+
+ def get(key):
+ callable_ = self.template._get_def_callable(key)
+ return functools.partial(callable_, self.context)
+
+ for k in self.template.module._exports:
+ yield (k, get(k))
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif self.template.has_def(key):
+ callable_ = self.template._get_def_callable(key)
+ val = functools.partial(callable_, self.context)
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" % (self.name, key)
+ )
+ setattr(self, key, val)
+ return val
+
+
+class ModuleNamespace(Namespace):
+
+ """A :class:`.Namespace` specific to a Python module instance."""
+
+ def __init__(
+ self,
+ name,
+ context,
+ module,
+ callables=None,
+ inherits=None,
+ populate_self=True,
+ calling_uri=None,
+ ):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ mod = __import__(module)
+ for token in module.split(".")[1:]:
+ mod = getattr(mod, token)
+ self.module = mod
+
+ @property
+ def filename(self):
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+ """
+ return self.module.__file__
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+ for key in dir(self.module):
+ if key[0] != "_":
+ callable_ = getattr(self.module, key)
+ if callable(callable_):
+ yield key, functools.partial(callable_, self.context)
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif hasattr(self.module, key):
+ callable_ = getattr(self.module, key)
+ val = functools.partial(callable_, self.context)
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" % (self.name, key)
+ )
+ setattr(self, key, val)
+ return val
+
+
+def supports_caller(func):
+ """Apply a caller_stack compatibility decorator to a plain
+ Python function.
+
+ See the example in :ref:`namespaces_python_modules`.
+
+ """
+
+ def wrap_stackframe(context, *args, **kwargs):
+ context.caller_stack._push_frame()
+ try:
+ return func(context, *args, **kwargs)
+ finally:
+ context.caller_stack._pop_frame()
+
+ return wrap_stackframe
+
+
+def capture(context, callable_, *args, **kwargs):
+ """Execute the given template def, capturing the output into
+ a buffer.
+
+ See the example in :ref:`namespaces_python_modules`.
+
+ """
+
+ if not callable(callable_):
+ raise exceptions.RuntimeException(
+ "capture() function expects a callable as "
+ "its argument (i.e. capture(func, *args, **kwargs))"
+ )
+ context._push_buffer()
+ try:
+ callable_(*args, **kwargs)
+ finally:
+ buf = context._pop_buffer()
+ return buf.getvalue()
+
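+
+# Editor's note: the function below is an illustrative sketch added in
+# review; it is not part of upstream Mako.  It drives capture() manually
+# with a throwaway Context and a hypothetical writer callable, showing how
+# the output is buffered and returned rather than written to the context's
+# main buffer.
+def _editor_demo_capture():
+    ctx = Context(compat.StringIO())
+
+    def greet(name):
+        # writes go to the buffer that capture() pushes onto the context
+        ctx.write("hello, %s" % name)
+
+    return capture(ctx, greet, "mako")  # expected: "hello, mako"
+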
+
+def _decorate_toplevel(fn):
+ def decorate_render(render_fn):
+ def go(context, *args, **kw):
+ def y(*args, **kw):
+ return render_fn(context, *args, **kw)
+
+ try:
+ y.__name__ = render_fn.__name__[7:]
+ except TypeError:
+ # < Python 2.4
+ pass
+ return fn(y)(context, *args, **kw)
+
+ return go
+
+ return decorate_render
+
+
+def _decorate_inline(context, fn):
+ def decorate_render(render_fn):
+ dec = fn(render_fn)
+
+ def go(*args, **kw):
+ return dec(context, *args, **kw)
+
+ return go
+
+ return decorate_render
+
+
+def _include_file(context, uri, calling_uri, **kwargs):
+ """locate the template from the given uri and include it in
+ the current output."""
+
+ template = _lookup_template(context, uri, calling_uri)
+ (callable_, ctx) = _populate_self_namespace(
+ context._clean_inheritance_tokens(), template
+ )
+ kwargs = _kwargs_for_include(callable_, context._data, **kwargs)
+ if template.include_error_handler:
+ try:
+ callable_(ctx, **kwargs)
+ except Exception:
+ result = template.include_error_handler(ctx, compat.exception_as())
+ if not result:
+ compat.reraise(*sys.exc_info())
+ else:
+ callable_(ctx, **kwargs)
+
+
+def _inherit_from(context, uri, calling_uri):
+ """called by the _inherit method in template modules to set
+ up the inheritance chain at the start of a template's
+ execution."""
+
+ if uri is None:
+ return None
+ template = _lookup_template(context, uri, calling_uri)
+ self_ns = context["self"]
+ ih = self_ns
+ while ih.inherits is not None:
+ ih = ih.inherits
+ lclcontext = context._locals({"next": ih})
+ ih.inherits = TemplateNamespace(
+ "self:%s" % template.uri,
+ lclcontext,
+ template=template,
+ populate_self=False,
+ )
+ context._data["parent"] = lclcontext._data["local"] = ih.inherits
+ callable_ = getattr(template.module, "_mako_inherit", None)
+ if callable_ is not None:
+ ret = callable_(template, lclcontext)
+ if ret:
+ return ret
+
+ gen_ns = getattr(template.module, "_mako_generate_namespaces", None)
+ if gen_ns is not None:
+ gen_ns(context)
+ return (template.callable_, lclcontext)
+
+
+def _lookup_template(context, uri, relativeto):
+ lookup = context._with_template.lookup
+ if lookup is None:
+ raise exceptions.TemplateLookupException(
+ "Template '%s' has no TemplateLookup associated"
+ % context._with_template.uri
+ )
+ uri = lookup.adjust_uri(uri, relativeto)
+ try:
+ return lookup.get_template(uri)
+ except exceptions.TopLevelLookupException:
+ raise exceptions.TemplateLookupException(str(compat.exception_as()))
+
+
+def _populate_self_namespace(context, template, self_ns=None):
+ if self_ns is None:
+ self_ns = TemplateNamespace(
+ "self:%s" % template.uri,
+ context,
+ template=template,
+ populate_self=False,
+ )
+ context._data["self"] = context._data["local"] = self_ns
+ if hasattr(template.module, "_mako_inherit"):
+ ret = template.module._mako_inherit(template, context)
+ if ret:
+ return ret
+ return (template.callable_, context)
+
+
+def _render(template, callable_, args, data, as_unicode=False):
+ """create a Context and return the string
+ output of the given template and template callable."""
+
+ if as_unicode:
+ buf = util.FastEncodingBuffer(as_unicode=True)
+ elif template.bytestring_passthrough:
+ buf = compat.StringIO()
+ else:
+ buf = util.FastEncodingBuffer(
+ as_unicode=as_unicode,
+ encoding=template.output_encoding,
+ errors=template.encoding_errors,
+ )
+ context = Context(buf, **data)
+ context._outputting_as_unicode = as_unicode
+ context._set_with_template(template)
+
+ _render_context(
+ template,
+ callable_,
+ context,
+ *args,
+ **_kwargs_for_callable(callable_, data)
+ )
+ return context._pop_buffer().getvalue()
+
+
+def _kwargs_for_callable(callable_, data):
+ argspec = compat.inspect_getargspec(callable_)
+ # for normal pages, **pageargs is usually present
+ if argspec[2]:
+ return data
+
+ # for rendering defs from the top level, figure out the args
+ namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
+ kwargs = {}
+ for arg in namedargs:
+ if arg != "context" and arg in data and arg not in kwargs:
+ kwargs[arg] = data[arg]
+ return kwargs
+
+
+def _kwargs_for_include(callable_, data, **kwargs):
+ argspec = compat.inspect_getargspec(callable_)
+ namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
+ for arg in namedargs:
+ if arg != "context" and arg in data and arg not in kwargs:
+ kwargs[arg] = data[arg]
+ return kwargs
+
+
+def _render_context(tmpl, callable_, context, *args, **kwargs):
+ import mako.template as template
+
+ # create polymorphic 'self' namespace for this
+ # template with possibly updated context
+ if not isinstance(tmpl, template.DefTemplate):
+ # if main render method, call from the base of the inheritance stack
+ (inherit, lclcontext) = _populate_self_namespace(context, tmpl)
+ _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
+ else:
+ # otherwise, call the actual rendering method specified
+ (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent)
+ _exec_template(callable_, context, args=args, kwargs=kwargs)
+
+
+def _exec_template(callable_, context, args=None, kwargs=None):
+ """execute a rendering callable given the callable, a
+ Context, and optional explicit arguments
+
+ the contextual Template will be located if it exists, and
+ the error handling options specified on that Template will
+ be interpreted here.
+ """
+ template = context._with_template
+ if template is not None and (
+ template.format_exceptions or template.error_handler
+ ):
+ try:
+ callable_(context, *args, **kwargs)
+ except Exception:
+ _render_error(template, context, compat.exception_as())
+ except:
+ e = sys.exc_info()[0]
+ _render_error(template, context, e)
+ else:
+ callable_(context, *args, **kwargs)
+
+
+def _render_error(template, context, error):
+ if template.error_handler:
+ result = template.error_handler(context, error)
+ if not result:
+ compat.reraise(*sys.exc_info())
+ else:
+ error_template = exceptions.html_error_template()
+ if context._outputting_as_unicode:
+ context._buffer_stack[:] = [
+ util.FastEncodingBuffer(as_unicode=True)
+ ]
+ else:
+ context._buffer_stack[:] = [
+ util.FastEncodingBuffer(
+ error_template.output_encoding,
+ error_template.encoding_errors,
+ )
+ ]
+
+ context._set_with_template(error_template)
+ error_template.render_context(context, error=error)
diff --git a/third_party/python/Mako/mako/template.py b/third_party/python/Mako/mako/template.py
new file mode 100644
index 0000000000..3fd08714b8
--- /dev/null
+++ b/third_party/python/Mako/mako/template.py
@@ -0,0 +1,780 @@
+# mako/template.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides the Template class, a facade for parsing, generating and executing
+template strings, as well as template runtime operations."""
+
+import json
+import os
+import re
+import shutil
+import stat
+import sys
+import tempfile
+import types
+import weakref
+
+from mako import cache
+from mako import codegen
+from mako import compat
+from mako import exceptions
+from mako import runtime
+from mako import util
+from mako.lexer import Lexer
+
+
+class Template(object):
+
+ r"""Represents a compiled template.
+
+ :class:`.Template` includes a reference to the original
+ template source (via the :attr:`.source` attribute)
+ as well as the source code of the
+ generated Python module (i.e. the :attr:`.code` attribute),
+ as well as a reference to an actual Python module.
+
+ :class:`.Template` is constructed using either a literal string
+ representing the template text, or a filename representing a filesystem
+ path to a source file.
+
+ :param text: textual template source. This argument is mutually
+ exclusive versus the ``filename`` parameter.
+
+ :param filename: filename of the source template. This argument is
+ mutually exclusive versus the ``text`` parameter.
+
+ :param buffer_filters: string list of filters to be applied
+ to the output of ``%def``\ s which are buffered, cached, or otherwise
+ filtered, after all filters
+ defined with the ``%def`` itself have been applied. Allows the
+ creation of default expression filters that let the output
+ of return-valued ``%def``\ s "opt out" of that filtering via
+ passing special attributes or objects.
+
+ :param bytestring_passthrough: When ``True``, and ``output_encoding`` is
+ set to ``None``, and :meth:`.Template.render` is used to render,
+ the `StringIO` or `cStringIO` buffer will be used instead of the
+ default "fast" buffer. This allows raw bytestrings in the
+ output stream, such as in expressions, to pass straight
+ through to the buffer. This flag is forced
+ to ``True`` if ``disable_unicode`` is also configured.
+
+ .. versionadded:: 0.4
+ Added to provide the same behavior as that of the previous series.
+
+ :param cache_args: Dictionary of cache configuration arguments that
+ will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`.
+
+ :param cache_dir:
+
+ .. deprecated:: 0.6
+ Use the ``'dir'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param cache_enabled: Boolean flag which enables caching of this
+ template. See :ref:`caching_toplevel`.
+
+ :param cache_impl: String name of a :class:`.CacheImpl` caching
+ implementation to use. Defaults to ``'beaker'``.
+
+ :param cache_type:
+
+ .. deprecated:: 0.6
+ Use the ``'type'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param cache_url:
+
+ .. deprecated:: 0.6
+ Use the ``'url'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param default_filters: List of string filter names that will
+ be applied to all expressions. See :ref:`filtering_default_filters`.
+
+ :param disable_unicode: Disables all awareness of Python Unicode
+ objects. See :ref:`unicode_disabled`.
+
+ :param enable_loop: When ``True``, enable the ``loop`` context variable.
+ This can be set to ``False`` to support templates that may
+ be making usage of the name "``loop``". Individual templates can
+ re-enable the "loop" context by placing the directive
+ ``enable_loop="True"`` inside the ``<%page>`` tag -- see
+ :ref:`migrating_loop`.
+
+ :param encoding_errors: Error parameter passed to ``encode()`` when
+ string encoding is performed. See :ref:`usage_unicode`.
+
+ :param error_handler: Python callable which is called whenever
+ compile or runtime exceptions occur. The callable is passed
+ the current context as well as the exception. If the
+ callable returns ``True``, the exception is considered to
+ be handled, else it is re-raised after the function
+ completes. Is used to provide custom error-rendering
+ functions.
+
+ .. seealso::
+
+ :paramref:`.Template.include_error_handler` - include-specific
+ error handler function
+
+ :param format_exceptions: if ``True``, exceptions which occur during
+ the render phase of this template will be caught and
+ formatted into an HTML error page, which then becomes the
+ rendered result of the :meth:`.render` call. Otherwise,
+ runtime exceptions are propagated outwards.
+
+ :param imports: String list of Python statements, typically individual
+ "import" lines, which will be placed into the module level
+ preamble of all generated Python modules. See the example
+ in :ref:`filtering_default_filters`.
+
+ :param future_imports: String list of names to import from `__future__`.
+ These will be concatenated into a comma-separated string and inserted
+      into the beginning of the template, e.g. ``future_imports=['FOO',
+ 'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
+ interested in using features like the new division operator, you must
+ use future_imports to convey that to the renderer, as otherwise the
+ import will not appear as the first executed statement in the generated
+ code and will therefore not have the desired effect.
+
+ :param include_error_handler: An error handler that runs when this template
+ is included within another one via the ``<%include>`` tag, and raises an
+ error. Compare to the :paramref:`.Template.error_handler` option.
+
+ .. versionadded:: 1.0.6
+
+ .. seealso::
+
+ :paramref:`.Template.error_handler` - top-level error handler function
+
+ :param input_encoding: Encoding of the template's source code. Can
+ be used in lieu of the coding comment. See
+ :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
+ details on source encoding.
+
+ :param lookup: a :class:`.TemplateLookup` instance that will be used
+ for all file lookups via the ``<%namespace>``,
+ ``<%include>``, and ``<%inherit>`` tags. See
+ :ref:`usage_templatelookup`.
+
+ :param module_directory: Filesystem location where generated
+ Python module files will be placed.
+
+ :param module_filename: Overrides the filename of the generated
+ Python module file. For advanced usage only.
+
+ :param module_writer: A callable which overrides how the Python
+ module is written entirely. The callable is passed the
+ encoded source content of the module and the destination
+ path to be written to. The default behavior of module writing
+ uses a tempfile in conjunction with a file move in order
+ to make the operation atomic. So a user-defined module
+ writing function that mimics the default behavior would be:
+
+ .. sourcecode:: python
+
+ import tempfile
+ import os
+ import shutil
+
+ def module_writer(source, outputpath):
+ (dest, name) = \\
+ tempfile.mkstemp(
+ dir=os.path.dirname(outputpath)
+ )
+
+ os.write(dest, source)
+ os.close(dest)
+ shutil.move(name, outputpath)
+
+ from mako.template import Template
+ mytemplate = Template(
+ filename="index.html",
+ module_directory="/path/to/modules",
+ module_writer=module_writer
+ )
+
+ The function is provided for unusual configurations where
+ certain platform-specific permissions or other special
+ steps are needed.
+
+ :param output_encoding: The encoding to use when :meth:`.render`
+ is called.
+ See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`.
+
+ :param preprocessor: Python callable which will be passed
+ the full template source before it is parsed. The return
+ result of the callable will be used as the template source
+ code.
+
+ :param lexer_cls: A :class:`.Lexer` class used to parse
+ the template. The :class:`.Lexer` class is used by
+ default.
+
+ .. versionadded:: 0.7.4
+
+ :param strict_undefined: Replaces the automatic usage of
+ ``UNDEFINED`` for any undeclared variables not located in
+ the :class:`.Context` with an immediate raise of
+ ``NameError``. The advantage is immediate reporting of
+ missing variables which include the name.
+
+ .. versionadded:: 0.3.6
+
+ :param uri: string URI or other identifier for this template.
+ If not provided, the ``uri`` is generated from the filesystem
+ path, or from the in-memory identity of a non-file-based
+ template. The primary usage of the ``uri`` is to provide a key
+ within :class:`.TemplateLookup`, as well as to generate the
+ file path of the generated Python module file, if
+ ``module_directory`` is specified.
+
+ """
+
+ lexer_cls = Lexer
+
+ def __init__(
+ self,
+ text=None,
+ filename=None,
+ uri=None,
+ format_exceptions=False,
+ error_handler=None,
+ lookup=None,
+ output_encoding=None,
+ encoding_errors="strict",
+ module_directory=None,
+ cache_args=None,
+ cache_impl="beaker",
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+ module_filename=None,
+ input_encoding=None,
+ disable_unicode=False,
+ module_writer=None,
+ bytestring_passthrough=False,
+ default_filters=None,
+ buffer_filters=(),
+ strict_undefined=False,
+ imports=None,
+ future_imports=None,
+ enable_loop=True,
+ preprocessor=None,
+ lexer_cls=None,
+ include_error_handler=None,
+ ):
+ if uri:
+ self.module_id = re.sub(r"\W", "_", uri)
+ self.uri = uri
+ elif filename:
+ self.module_id = re.sub(r"\W", "_", filename)
+ drive, path = os.path.splitdrive(filename)
+ path = os.path.normpath(path).replace(os.path.sep, "/")
+ self.uri = path
+ else:
+ self.module_id = "memory:" + hex(id(self))
+ self.uri = self.module_id
+
+ u_norm = self.uri
+ if u_norm.startswith("/"):
+ u_norm = u_norm[1:]
+ u_norm = os.path.normpath(u_norm)
+ if u_norm.startswith(".."):
+ raise exceptions.TemplateLookupException(
+ 'Template uri "%s" is invalid - '
+ "it cannot be relative outside "
+ "of the root path." % self.uri
+ )
+
+ self.input_encoding = input_encoding
+ self.output_encoding = output_encoding
+ self.encoding_errors = encoding_errors
+ self.disable_unicode = disable_unicode
+ self.bytestring_passthrough = bytestring_passthrough or disable_unicode
+ self.enable_loop = enable_loop
+ self.strict_undefined = strict_undefined
+ self.module_writer = module_writer
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not " "support disabling Unicode"
+ )
+ elif output_encoding and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "output_encoding must be set to "
+ "None when disable_unicode is used."
+ )
+ if default_filters is None:
+ if compat.py3k or self.disable_unicode:
+ self.default_filters = ["str"]
+ else:
+ self.default_filters = ["unicode"]
+ else:
+ self.default_filters = default_filters
+ self.buffer_filters = buffer_filters
+
+ self.imports = imports
+ self.future_imports = future_imports
+ self.preprocessor = preprocessor
+
+ if lexer_cls is not None:
+ self.lexer_cls = lexer_cls
+
+ # if plain text, compile code in memory only
+ if text is not None:
+ (code, module) = _compile_text(self, text, filename)
+ self._code = code
+ self._source = text
+ ModuleInfo(module, None, self, filename, code, text, uri)
+ elif filename is not None:
+ # if template filename and a module directory, load
+ # a filesystem-based module file, generating if needed
+ if module_filename is not None:
+ path = module_filename
+ elif module_directory is not None:
+ path = os.path.abspath(
+ os.path.join(
+ os.path.normpath(module_directory), u_norm + ".py"
+ )
+ )
+ else:
+ path = None
+ module = self._compile_from_file(path, filename)
+ else:
+ raise exceptions.RuntimeException(
+ "Template requires text or filename"
+ )
+
+ self.module = module
+ self.filename = filename
+ self.callable_ = self.module.render_body
+ self.format_exceptions = format_exceptions
+ self.error_handler = error_handler
+ self.include_error_handler = include_error_handler
+ self.lookup = lookup
+
+ self.module_directory = module_directory
+
+ self._setup_cache_args(
+ cache_impl,
+ cache_enabled,
+ cache_args,
+ cache_type,
+ cache_dir,
+ cache_url,
+ )
+
+ @util.memoized_property
+ def reserved_names(self):
+ if self.enable_loop:
+ return codegen.RESERVED_NAMES
+ else:
+ return codegen.RESERVED_NAMES.difference(["loop"])
+
+ def _setup_cache_args(
+ self,
+ cache_impl,
+ cache_enabled,
+ cache_args,
+ cache_type,
+ cache_dir,
+ cache_url,
+ ):
+ self.cache_impl = cache_impl
+ self.cache_enabled = cache_enabled
+ if cache_args:
+ self.cache_args = cache_args
+ else:
+ self.cache_args = {}
+
+ # transfer deprecated cache_* args
+ if cache_type:
+ self.cache_args["type"] = cache_type
+ if cache_dir:
+ self.cache_args["dir"] = cache_dir
+ if cache_url:
+ self.cache_args["url"] = cache_url
+
+ def _compile_from_file(self, path, filename):
+ if path is not None:
+ util.verify_directory(os.path.dirname(path))
+ filemtime = os.stat(filename)[stat.ST_MTIME]
+ if (
+ not os.path.exists(path)
+ or os.stat(path)[stat.ST_MTIME] < filemtime
+ ):
+ data = util.read_file(filename)
+ _compile_module_file(
+ self, data, filename, path, self.module_writer
+ )
+ module = compat.load_module(self.module_id, path)
+ del sys.modules[self.module_id]
+ if module._magic_number != codegen.MAGIC_NUMBER:
+ data = util.read_file(filename)
+ _compile_module_file(
+ self, data, filename, path, self.module_writer
+ )
+ module = compat.load_module(self.module_id, path)
+ del sys.modules[self.module_id]
+ ModuleInfo(module, path, self, filename, None, None, None)
+ else:
+ # template filename and no module directory, compile code
+ # in memory
+ data = util.read_file(filename)
+ code, module = _compile_text(self, data, filename)
+ self._source = None
+ self._code = code
+ ModuleInfo(module, None, self, filename, code, None, None)
+ return module
+
+ @property
+ def source(self):
+ """Return the template source code for this :class:`.Template`."""
+
+ return _get_module_info_from_callable(self.callable_).source
+
+ @property
+ def code(self):
+ """Return the module source code for this :class:`.Template`."""
+
+ return _get_module_info_from_callable(self.callable_).code
+
+ @util.memoized_property
+ def cache(self):
+ return cache.Cache(self)
+
+ @property
+ def cache_dir(self):
+ return self.cache_args["dir"]
+
+ @property
+ def cache_url(self):
+ return self.cache_args["url"]
+
+ @property
+ def cache_type(self):
+ return self.cache_args["type"]
+
+ def render(self, *args, **data):
+ """Render the output of this template as a string.
+
+ If the template specifies an output encoding, the string
+ will be encoded accordingly, else the output is raw (raw
+ output uses `cStringIO` and can't handle multibyte
+ characters). A :class:`.Context` object is created corresponding
+ to the given data. Arguments that are explicitly declared
+ by this template's internal rendering method are also
+ pulled from the given ``*args``, ``**data`` members.
+
+ """
+ return runtime._render(self, self.callable_, args, data)
+
+ def render_unicode(self, *args, **data):
+ """Render the output of this template as a unicode object."""
+
+ return runtime._render(
+ self, self.callable_, args, data, as_unicode=True
+ )
+
+ def render_context(self, context, *args, **kwargs):
+ """Render this :class:`.Template` with the given context.
+
+ The data is written to the context's buffer.
+
+ """
+ if getattr(context, "_with_template", None) is None:
+ context._set_with_template(self)
+ runtime._render_context(self, self.callable_, context, *args, **kwargs)
+
+ def has_def(self, name):
+ return hasattr(self.module, "render_%s" % name)
+
+ def get_def(self, name):
+ """Return a def of this template as a :class:`.DefTemplate`."""
+
+ return DefTemplate(self, getattr(self.module, "render_%s" % name))
+
+ def list_defs(self):
+ """return a list of defs in the template.
+
+ .. versionadded:: 1.0.4
+
+ """
+ return [i[7:] for i in dir(self.module) if i[:7] == "render_"]
+
+ def _get_def_callable(self, name):
+ return getattr(self.module, "render_%s" % name)
+
+ @property
+ def last_modified(self):
+ return self.module._modified_time
+
+
+class ModuleTemplate(Template):
+
+ """A Template which is constructed given an existing Python module.
+
+ e.g.::
+
+ t = Template("this is a template")
+ f = file("mymodule.py", "w")
+ f.write(t.code)
+ f.close()
+
+ import mymodule
+
+ t = ModuleTemplate(mymodule)
+ print(t.render())
+
+ """
+
+ def __init__(
+ self,
+ module,
+ module_filename=None,
+ template=None,
+ template_filename=None,
+ module_source=None,
+ template_source=None,
+ output_encoding=None,
+ encoding_errors="strict",
+ disable_unicode=False,
+ bytestring_passthrough=False,
+ format_exceptions=False,
+ error_handler=None,
+ lookup=None,
+ cache_args=None,
+ cache_impl="beaker",
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+ include_error_handler=None,
+ ):
+ self.module_id = re.sub(r"\W", "_", module._template_uri)
+ self.uri = module._template_uri
+ self.input_encoding = module._source_encoding
+ self.output_encoding = output_encoding
+ self.encoding_errors = encoding_errors
+ self.disable_unicode = disable_unicode
+ self.bytestring_passthrough = bytestring_passthrough or disable_unicode
+ self.enable_loop = module._enable_loop
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not " "support disabling Unicode"
+ )
+ elif output_encoding and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "output_encoding must be set to "
+ "None when disable_unicode is used."
+ )
+
+ self.module = module
+ self.filename = template_filename
+ ModuleInfo(
+ module,
+ module_filename,
+ self,
+ template_filename,
+ module_source,
+ template_source,
+ module._template_uri,
+ )
+
+ self.callable_ = self.module.render_body
+ self.format_exceptions = format_exceptions
+ self.error_handler = error_handler
+ self.include_error_handler = include_error_handler
+ self.lookup = lookup
+ self._setup_cache_args(
+ cache_impl,
+ cache_enabled,
+ cache_args,
+ cache_type,
+ cache_dir,
+ cache_url,
+ )
+
+
+class DefTemplate(Template):
+
+ """A :class:`.Template` which represents a callable def in a parent
+ template."""
+
+ def __init__(self, parent, callable_):
+ self.parent = parent
+ self.callable_ = callable_
+ self.output_encoding = parent.output_encoding
+ self.module = parent.module
+ self.encoding_errors = parent.encoding_errors
+ self.format_exceptions = parent.format_exceptions
+ self.error_handler = parent.error_handler
+ self.include_error_handler = parent.include_error_handler
+ self.enable_loop = parent.enable_loop
+ self.lookup = parent.lookup
+ self.bytestring_passthrough = parent.bytestring_passthrough
+
+ def get_def(self, name):
+ return self.parent.get_def(name)
+
+
+class ModuleInfo(object):
+
+ """Stores information about a module currently loaded into
+ memory, provides reverse lookups of template source, module
+ source code based on a module's identifier.
+
+ """
+
+ _modules = weakref.WeakValueDictionary()
+
+ def __init__(
+ self,
+ module,
+ module_filename,
+ template,
+ template_filename,
+ module_source,
+ template_source,
+ template_uri,
+ ):
+ self.module = module
+ self.module_filename = module_filename
+ self.template_filename = template_filename
+ self.module_source = module_source
+ self.template_source = template_source
+ self.template_uri = template_uri
+ self._modules[module.__name__] = template._mmarker = self
+ if module_filename:
+ self._modules[module_filename] = self
+
+ @classmethod
+ def get_module_source_metadata(cls, module_source, full_line_map=False):
+ source_map = re.search(
+ r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S
+ ).group(1)
+ source_map = json.loads(source_map)
+ source_map["line_map"] = dict(
+ (int(k), int(v)) for k, v in source_map["line_map"].items()
+ )
+ if full_line_map:
+ f_line_map = source_map["full_line_map"] = []
+ line_map = source_map["line_map"]
+
+ curr_templ_line = 1
+ for mod_line in range(1, max(line_map)):
+ if mod_line in line_map:
+ curr_templ_line = line_map[mod_line]
+ f_line_map.append(curr_templ_line)
+ return source_map
+
+ @property
+ def code(self):
+ if self.module_source is not None:
+ return self.module_source
+ else:
+ return util.read_python_file(self.module_filename)
+
+ @property
+ def source(self):
+ if self.template_source is not None:
+ if self.module._source_encoding and not isinstance(
+ self.template_source, compat.text_type
+ ):
+ return self.template_source.decode(
+ self.module._source_encoding
+ )
+ else:
+ return self.template_source
+ else:
+ data = util.read_file(self.template_filename)
+ if self.module._source_encoding:
+ return data.decode(self.module._source_encoding)
+ else:
+ return data
+
+
+def _compile(template, text, filename, generate_magic_comment):
+ lexer = template.lexer_cls(
+ text,
+ filename,
+ disable_unicode=template.disable_unicode,
+ input_encoding=template.input_encoding,
+ preprocessor=template.preprocessor,
+ )
+ node = lexer.parse()
+ source = codegen.compile(
+ node,
+ template.uri,
+ filename,
+ default_filters=template.default_filters,
+ buffer_filters=template.buffer_filters,
+ imports=template.imports,
+ future_imports=template.future_imports,
+ source_encoding=lexer.encoding,
+ generate_magic_comment=generate_magic_comment,
+ disable_unicode=template.disable_unicode,
+ strict_undefined=template.strict_undefined,
+ enable_loop=template.enable_loop,
+ reserved_names=template.reserved_names,
+ )
+ return source, lexer
+
+
+def _compile_text(template, text, filename):
+ identifier = template.module_id
+ source, lexer = _compile(
+ template,
+ text,
+ filename,
+ generate_magic_comment=template.disable_unicode,
+ )
+
+ cid = identifier
+ if not compat.py3k and isinstance(cid, compat.text_type):
+ cid = cid.encode()
+ module = types.ModuleType(cid)
+ code = compile(source, cid, "exec")
+
+ # this exec() works for 2.4->3.3.
+ exec(code, module.__dict__, module.__dict__)
+ return (source, module)
+
+
+def _compile_module_file(template, text, filename, outputpath, module_writer):
+ source, lexer = _compile(
+ template, text, filename, generate_magic_comment=True
+ )
+
+ if isinstance(source, compat.text_type):
+ source = source.encode(lexer.encoding or "ascii")
+
+ if module_writer:
+ module_writer(source, outputpath)
+ else:
+ # make tempfiles in the same location as the ultimate
+ # location. this ensures they're on the same filesystem,
+ # avoiding synchronization issues.
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath))
+
+ os.write(dest, source)
+ os.close(dest)
+ shutil.move(name, outputpath)
+
+
+def _get_module_info_from_callable(callable_):
+ if compat.py3k:
+ return _get_module_info(callable_.__globals__["__name__"])
+ else:
+ return _get_module_info(callable_.func_globals["__name__"])
+
+
+def _get_module_info(filename):
+ return ModuleInfo._modules[filename]
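+
+
+# Editor's note: the block below is an illustrative sketch added in review;
+# it is not part of upstream Mako.  It renders a small in-memory template;
+# the template text is hypothetical.
+if __name__ == "__main__":
+    t = Template("hello, ${name}!")
+    print(t.render(name="mako"))  # expected: hello, mako!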
diff --git a/third_party/python/Mako/mako/util.py b/third_party/python/Mako/mako/util.py
new file mode 100644
index 0000000000..16e3c72658
--- /dev/null
+++ b/third_party/python/Mako/mako/util.py
@@ -0,0 +1,400 @@
+# mako/util.py
+# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import absolute_import
+
+from ast import parse
+import codecs
+import collections
+import operator
+import os
+import re
+import timeit
+
+from mako import compat
+
+
+def update_wrapper(decorated, fn):
+ decorated.__wrapped__ = fn
+ decorated.__name__ = fn.__name__
+ return decorated
+
+
+class PluginLoader(object):
+ def __init__(self, group):
+ self.group = group
+ self.impls = {}
+
+ def load(self, name):
+ if name in self.impls:
+ return self.impls[name]()
+ else:
+ import pkg_resources
+
+ for impl in pkg_resources.iter_entry_points(self.group, name):
+ self.impls[name] = impl.load
+ return impl.load()
+ else:
+ from mako import exceptions
+
+ raise exceptions.RuntimeException(
+ "Can't load plugin %s %s" % (self.group, name)
+ )
+
+ def register(self, name, modulepath, objname):
+ def load():
+ mod = __import__(modulepath)
+ for token in modulepath.split(".")[1:]:
+ mod = getattr(mod, token)
+ return getattr(mod, objname)
+
+ self.impls[name] = load
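+
+ # Usage sketch (the group and plugin names mirror how mako.cache registers
+ # its Beaker backend; they are shown here only as an illustration):
+ #
+ #     loader = PluginLoader("mako.cache")
+ #     loader.register("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
+ #     impl = loader.load("beaker")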
+
+
+def verify_directory(dir_):
+ """create and/or verify a filesystem directory."""
+
+ tries = 0
+
+ while not os.path.exists(dir_):
+ try:
+ tries += 1
+ os.makedirs(dir_, compat.octal("0775"))
+ except:
+ if tries > 5:
+ raise
+
+
+def to_list(x, default=None):
+ if x is None:
+ return default
+ if not isinstance(x, (list, tuple)):
+ return [x]
+ else:
+ return x
+
+
+class memoized_property(object):
+
+ """A read-only @property that is only evaluated once."""
+
+ def __init__(self, fget, doc=None):
+ self.fget = fget
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ obj.__dict__[self.__name__] = result = self.fget(obj)
+ return result
+
+
+class memoized_instancemethod(object):
+
+ """Decorate a method memoize its return value.
+
+ Best applied to no-arg methods: memoization is not sensitive to
+ argument values, and will always return the same value even when
+ called with different arguments.
+
+ """
+
+ def __init__(self, fget, doc=None):
+ self.fget = fget
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+
+ def oneshot(*args, **kw):
+ result = self.fget(obj, *args, **kw)
+
+ def memo(*a, **kw):
+ return result
+
+ memo.__name__ = self.__name__
+ memo.__doc__ = self.__doc__
+ obj.__dict__[self.__name__] = memo
+ return result
+
+ oneshot.__name__ = self.__name__
+ oneshot.__doc__ = self.__doc__
+ return oneshot
+
+
+class SetLikeDict(dict):
+
+ """a dictionary that has some setlike methods on it"""
+
+ def union(self, other):
+ """produce a 'union' of this dict and another (at the key level).
+
+ values in the second dict take precedence over that of the first"""
+ x = SetLikeDict(**self)
+ x.update(other)
+ return x
+
+
+class FastEncodingBuffer(object):
+
+ """a very rudimentary buffer that is faster than StringIO,
+ but doesn't crash on unicode data like cStringIO."""
+
+ def __init__(self, encoding=None, errors="strict", as_unicode=False):
+ self.data = collections.deque()
+ self.encoding = encoding
+ if as_unicode:
+ self.delim = compat.u("")
+ else:
+ self.delim = ""
+ self.as_unicode = as_unicode
+ self.errors = errors
+ self.write = self.data.append
+
+ def truncate(self):
+ self.data = collections.deque()
+ self.write = self.data.append
+
+ def getvalue(self):
+ if self.encoding:
+ return self.delim.join(self.data).encode(
+ self.encoding, self.errors
+ )
+ else:
+ return self.delim.join(self.data)
+
+
+class LRUCache(dict):
+
+ """A dictionary-like object that stores a limited number of items,
+ discarding lesser used items periodically.
+
+ this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
+ paradigm so that synchronization is not really needed. the size management
+ is inexact.
+ """
+
+ class _Item(object):
+ def __init__(self, key, value):
+ self.key = key
+ self.value = value
+ self.timestamp = timeit.default_timer()
+
+ def __repr__(self):
+ return repr(self.value)
+
+ def __init__(self, capacity, threshold=0.5):
+ self.capacity = capacity
+ self.threshold = threshold
+
+ def __getitem__(self, key):
+ item = dict.__getitem__(self, key)
+ item.timestamp = timeit.default_timer()
+ return item.value
+
+ def values(self):
+ return [i.value for i in dict.values(self)]
+
+ def setdefault(self, key, value):
+ if key in self:
+ return self[key]
+ else:
+ self[key] = value
+ return value
+
+ def __setitem__(self, key, value):
+ item = dict.get(self, key)
+ if item is None:
+ item = self._Item(key, value)
+ dict.__setitem__(self, key, item)
+ else:
+ item.value = value
+ self._manage_size()
+
+ def _manage_size(self):
+ while len(self) > self.capacity + self.capacity * self.threshold:
+ bytime = sorted(
+ dict.values(self),
+ key=operator.attrgetter("timestamp"),
+ reverse=True,
+ )
+ for item in bytime[self.capacity :]:
+ try:
+ del self[item.key]
+ except KeyError:
+ # if we couldn't find a key, most likely some other thread
+ # broke in on us. loop around and try again
+ break
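+
+ # Usage sketch: LRUCache acts like a dict, but once it grows past
+ # capacity * (1 + threshold) it trims back down to `capacity` entries,
+ # dropping the least recently accessed ones first.
+ #
+ #     cache = LRUCache(capacity=2, threshold=0.5)
+ #     for k, v in [("a", 1), ("b", 2), ("c", 3), ("d", 4)]:
+ #         cache[k] = v
+ #     # "a" and "b" have been evicted; "c" and "d" remain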
+
+
+# Regexp to match python magic encoding line
+_PYTHON_MAGIC_COMMENT_re = re.compile(
+ r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE
+)
+
+
+def parse_encoding(fp):
+ """Deduce the encoding of a Python source file (binary mode) from magic
+ comment.
+
+ It does this in the same way as the `Python interpreter`__
+
+ .. __: http://docs.python.org/ref/encodings.html
+
+ The ``fp`` argument should be a seekable file object in binary mode.
+ """
+ pos = fp.tell()
+ fp.seek(0)
+ try:
+ line1 = fp.readline()
+ has_bom = line1.startswith(codecs.BOM_UTF8)
+ if has_bom:
+ line1 = line1[len(codecs.BOM_UTF8) :]
+
+ m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore"))
+ if not m:
+ try:
+ parse(line1.decode("ascii", "ignore"))
+ except (ImportError, SyntaxError):
+ # Either it's a real syntax error, in which case the source
+ # is not valid python source, or line2 is a continuation of
+ # line1, in which case we don't want to scan line2 for a magic
+ # comment.
+ pass
+ else:
+ line2 = fp.readline()
+ m = _PYTHON_MAGIC_COMMENT_re.match(
+ line2.decode("ascii", "ignore")
+ )
+
+ if has_bom:
+ if m:
+ raise SyntaxError(
+ "python refuses to compile code with both a UTF8"
+ " byte-order-mark and a magic encoding comment"
+ )
+ return "utf_8"
+ elif m:
+ return m.group(1)
+ else:
+ return None
+ finally:
+ fp.seek(pos)
+
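+# A behavior sketch (the file name is hypothetical): for a file whose first or
+# second line is "# -*- coding: utf-8 -*-" this returns "utf-8"; with a UTF-8
+# byte-order-mark and no magic comment it returns "utf_8"; otherwise None.
+#
+#     with open("some_template_module.py", "rb") as fp:
+#         encoding = parse_encoding(fp)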
+
+def sorted_dict_repr(d):
+ """repr() a dictionary with the keys in order.
+
+ Used by the lexer unit test to compare parse trees based on strings.
+
+ """
+ keys = list(d.keys())
+ keys.sort()
+ return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
+
+
+def restore__ast(_ast):
+ """Attempt to restore the required classes to the _ast module if it
+ appears to be missing them
+ """
+ if hasattr(_ast, "AST"):
+ return
+ _ast.PyCF_ONLY_AST = 2 << 9
+ m = compile(
+ """\
+def foo(): pass
+class Bar(object): pass
+if False: pass
+baz = 'mako'
+1 + 2 - 3 * 4 / 5
+6 // 7 % 8 << 9 >> 10
+11 & 12 ^ 13 | 14
+15 and 16 or 17
+-baz + (not +18) - ~17
+baz and 'foo' or 'bar'
+(mako is baz == baz) is not baz != mako
+mako > baz < mako >= baz <= mako
+mako in baz not in mako""",
+ "<unknown>",
+ "exec",
+ _ast.PyCF_ONLY_AST,
+ )
+ _ast.Module = type(m)
+
+ for cls in _ast.Module.__mro__:
+ if cls.__name__ == "mod":
+ _ast.mod = cls
+ elif cls.__name__ == "AST":
+ _ast.AST = cls
+
+ _ast.FunctionDef = type(m.body[0])
+ _ast.ClassDef = type(m.body[1])
+ _ast.If = type(m.body[2])
+
+ _ast.Name = type(m.body[3].targets[0])
+ _ast.Store = type(m.body[3].targets[0].ctx)
+ _ast.Str = type(m.body[3].value)
+
+ _ast.Sub = type(m.body[4].value.op)
+ _ast.Add = type(m.body[4].value.left.op)
+ _ast.Div = type(m.body[4].value.right.op)
+ _ast.Mult = type(m.body[4].value.right.left.op)
+
+ _ast.RShift = type(m.body[5].value.op)
+ _ast.LShift = type(m.body[5].value.left.op)
+ _ast.Mod = type(m.body[5].value.left.left.op)
+ _ast.FloorDiv = type(m.body[5].value.left.left.left.op)
+
+ _ast.BitOr = type(m.body[6].value.op)
+ _ast.BitXor = type(m.body[6].value.left.op)
+ _ast.BitAnd = type(m.body[6].value.left.left.op)
+
+ _ast.Or = type(m.body[7].value.op)
+ _ast.And = type(m.body[7].value.values[0].op)
+
+ _ast.Invert = type(m.body[8].value.right.op)
+ _ast.Not = type(m.body[8].value.left.right.op)
+ _ast.UAdd = type(m.body[8].value.left.right.operand.op)
+ _ast.USub = type(m.body[8].value.left.left.op)
+
+ _ast.Or = type(m.body[9].value.op)
+ _ast.And = type(m.body[9].value.values[0].op)
+
+ _ast.IsNot = type(m.body[10].value.ops[0])
+ _ast.NotEq = type(m.body[10].value.ops[1])
+ _ast.Is = type(m.body[10].value.left.ops[0])
+ _ast.Eq = type(m.body[10].value.left.ops[1])
+
+ _ast.Gt = type(m.body[11].value.ops[0])
+ _ast.Lt = type(m.body[11].value.ops[1])
+ _ast.GtE = type(m.body[11].value.ops[2])
+ _ast.LtE = type(m.body[11].value.ops[3])
+
+ _ast.In = type(m.body[12].value.ops[0])
+ _ast.NotIn = type(m.body[12].value.ops[1])
+
+
+def read_file(path, mode="rb"):
+ fp = open(path, mode)
+ try:
+ data = fp.read()
+ return data
+ finally:
+ fp.close()
+
+
+def read_python_file(path):
+ fp = open(path, "rb")
+ try:
+ encoding = parse_encoding(fp)
+ data = fp.read()
+ if encoding:
+ data = data.decode(encoding)
+ return data
+ finally:
+ fp.close()
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/AUTHORS.md b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/AUTHORS.md
index 525116ee7e..525116ee7e 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/AUTHORS.md
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/AUTHORS.md
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/LICENSE b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/LICENSE
index a612ad9813..a612ad9813 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/LICENSE
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/LICENSE
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/METADATA b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/METADATA
index 201d8bb48b..1e31df3dd4 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/METADATA
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: glean-parser
-Version: 11.0.1
+Version: 13.0.0
Summary: Parser tools for Mozilla's Glean telemetry
Home-page: https://github.com/mozilla/glean_parser
Author: The Glean Team
@@ -79,6 +79,23 @@ $ glean_parser check < ping.json
## Unreleased
+## 13.0.0
+
+- BREAKING CHANGE: Support metadata field `include_info_sections` ([bug 1866559](https://bugzilla.mozilla.org/show_bug.cgi?id=1866559))
+
+## 12.0.1
+
+- Fix Rust codegen for object metric type ([#662](https://github.com/mozilla/glean_parser/pull/662))
+
+## 12.0.0
+
+- Add new metric type object (only Rust codegen support right now) ([#587](https://github.com/mozilla/glean_parser/pull/587))
+
+## 11.1.0
+
+- Add Go log outputter (`go_server`) ([#645](https://github.com/mozilla/glean_parser/pull/645))
+- Add Python log outputter (`python_server`) ([MPP-3642](https://mozilla-hub.atlassian.net/browse/MPP-3642))
+
## 11.0.1
- Fix javascript_server template to include non-event metric parameters in #record call for event metrics ([#643](https://github.com/mozilla/glean_parser/pull/643))
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/RECORD b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/RECORD
index 417484d30b..62e4bb6fbb 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/RECORD
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/RECORD
@@ -2,28 +2,31 @@ glean_parser/__init__.py,sha256=bJljD052_0y-efcBhYpllICVCXOMHLcXRLNyrvfgt5A,533
glean_parser/__main__.py,sha256=Rw0PpuQtAvdHJMK1YLozeZkc6x1yjeNZwidu4faovdk,8633
glean_parser/coverage.py,sha256=2IwC4XMDtDamMkBFoYilmqJzW4gyypq65YVCur8SNas,4405
glean_parser/data_review.py,sha256=BweeeTkNNS6HrIDkztawhbDByrk_-Avxpg7YeST3VAs,2152
+glean_parser/go_server.py,sha256=SCcGrjRktlPyl79LbjIvtBeCNYVOXOW4Q8xkuks0bzE,5345
glean_parser/javascript.py,sha256=w4ZhNBHBKWYk0h3t7G0Ud2tR__hRqzn9dlEXNKLdQrA,11230
glean_parser/javascript_server.py,sha256=SDV9tPL1uZMyS1VSyo5lOFuNPFHZu-PZxr1vhND-GzM,7971
glean_parser/kotlin.py,sha256=5z8_74xlqvHDsedwZhGf1_qb7swPEgIZumkJIuj3ef8,12598
glean_parser/lint.py,sha256=STqdgyOhR4Q3fHivSizgn9bOOyqrNHhzjaqyJxz6qzI,19948
glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066
-glean_parser/metrics.py,sha256=uWOJdZRB9udMni2EWXcL3P1T4pRIlJ5kiE5fJsHkmdU,12450
+glean_parser/metrics.py,sha256=YAO8wPuRHTLkdT9M4zh9ZwoFI1_VS8O9oQqwZNYyDp0,14612
glean_parser/parser.py,sha256=cUOnvSXKfEBg8YTpRcWiPcMwpFpK1TTqsVO_zjUtpR4,15309
-glean_parser/pings.py,sha256=la9HdJTjtSqC7vc5-JuANW0otbozTnFARlIMgKoypGU,2982
+glean_parser/pings.py,sha256=AQ-fBmIx2GKQv6J2NyTFfHHZzSnApZZoC770LlstkoI,3180
+glean_parser/python_server.py,sha256=3ZsqeNJknKO9yvtBJWxe67JthzSMqNMuo9DfhgF2kvg,4790
glean_parser/ruby_server.py,sha256=-bNXjfXWwHWUHmLJVvfi6jCyw8q0MBwx9VXVWQ3bU-A,5189
-glean_parser/rust.py,sha256=PJzTfYWzAumJYCP5IYPc6fhS_Qa30Q8NTK9plg3sDnk,6744
+glean_parser/rust.py,sha256=UEHeIZlToxCBelfec5sl_l_uLZfk8f_OUXqa_ZoEvnk,7330
glean_parser/swift.py,sha256=T1BSGahd9wUd6VDeNC89SdN6M34jKXDlydMpSI0QLOs,8379
glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391
-glean_parser/translate.py,sha256=C7FY7AAbnVsPZOu2bKELW1CfTwnvLGpmgzY7uMDqOec,8233
+glean_parser/translate.py,sha256=luKQoraARZ2tjenHs0SVtCxflnYaMkzPYFfKEdKdSqQ,8403
glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037
-glean_parser/util.py,sha256=X5YFAU4kWdDJjMsJzXH-QJVSjUJc_qvXktiM-dJSfzo,16004
+glean_parser/util.py,sha256=KgvmjETOV1IIGD4hF_o5zcUDE-wp3SHxrNHM1niU0CM,16033
glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118
glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566
-glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=sfrARxefWy1WN5HxUKjwjN8lGobbPds5l7Y46VHfP1g,25849
+glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=wx1q0L4C0-Vcwk1SPU6t8OfjDEQvgrwwEG6xfSHO1MI,26365
glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315
-glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=l-nIuyXJ9-D0X_U6hzGVbhIBhtZDg-rGau-RDrhgpng,4705
+glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=vDyvFT8KwAwaqyWHG4y6pFNrsc3NO7OyDDagA2eTeqM,5415
glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231
glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287
+glean_parser/templates/go_server.jinja2,sha256=Jy1e0uQqr_WZNoj-AWnygRmygX2jyj_GQMMV8mSah2k,6825
glean_parser/templates/javascript.buildinfo.jinja2,sha256=4mXiZCQIk9if4lxlA05kpSIL4a95IdwGwqle2OqqNAs,474
glean_parser/templates/javascript.jinja2,sha256=cT_bG-jC6m4afECXmcsqHwiiHjRuVtJnfv90OD2Mwxw,2669
glean_parser/templates/javascript_server.jinja2,sha256=H991yQOKJMwSgM0bLEA-Q5Z15LWsfEPh6bTYz_owSCU,9423
@@ -31,14 +34,15 @@ glean_parser/templates/kotlin.buildinfo.jinja2,sha256=X0lk2SNu5OIIj2i6mUyF9CWFQI
glean_parser/templates/kotlin.geckoview.jinja2,sha256=MJOgtoDXmBjE9pwk-G6T89y36RZuMbDWM_-DBN_gFJo,5099
glean_parser/templates/kotlin.jinja2,sha256=3DqUMXJRkmTvSp_5IRyvGmw5iXYWdox7coMFe3YDxcc,5247
glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl3NpXxcKV4,3425
+glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260
glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156
glean_parser/templates/ruby_server.jinja2,sha256=vm4BEenOqzomQNTLFfMOzlWHARnsWUjTBbnR-v2cadI,6247
-glean_parser/templates/rust.jinja2,sha256=pdbjq_JGm8XWHsVXk0m2xZ5Pd-Y9T_zxJfZKBoT0ERU,3635
-glean_parser/templates/swift.jinja2,sha256=NfZdvrG8LGT4H2AWk-vB_GDTMcpW1XZJcApO4OF5AYE,4874
-glean_parser-11.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
-glean_parser-11.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-glean_parser-11.0.1.dist-info/METADATA,sha256=z5yLEYgY4EV1e_cHNQhenhkwK5ryURgljfTfaYK-NYs,30877
-glean_parser-11.0.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-glean_parser-11.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
-glean_parser-11.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
-glean_parser-11.0.1.dist-info/RECORD,,
+glean_parser/templates/rust.jinja2,sha256=wlV0OZvV3Mk2ulrqFkN1vGjdsahsupEy2TQvWxQKzww,5439
+glean_parser/templates/swift.jinja2,sha256=xkvVsTpfK0QK3tI32wGqzxm2hqFNaBQ6Y71rKIsCmAI,4944
+glean_parser-13.0.0.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
+glean_parser-13.0.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+glean_parser-13.0.0.dist-info/METADATA,sha256=BzYfW5GF-wZLrokfvUTiZg7JT5BTfB1E3xIDKW6h_BY,31493
+glean_parser-13.0.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+glean_parser-13.0.0.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
+glean_parser-13.0.0.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
+glean_parser-13.0.0.dist-info/RECORD,,
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/WHEEL b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/WHEEL
index 98c0d20b7a..98c0d20b7a 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/WHEEL
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/WHEEL
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/entry_points.txt b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/entry_points.txt
index 08fde9d655..08fde9d655 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/entry_points.txt
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/entry_points.txt
diff --git a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/top_level.txt b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/top_level.txt
index a7f3a37918..a7f3a37918 100644
--- a/third_party/python/glean_parser/glean_parser-11.0.1.dist-info/top_level.txt
+++ b/third_party/python/glean_parser/glean_parser-13.0.0.dist-info/top_level.txt
diff --git a/third_party/python/glean_parser/glean_parser/go_server.py b/third_party/python/glean_parser/glean_parser/go_server.py
new file mode 100644
index 0000000000..403a0d71f4
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/go_server.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate server Go code for collecting events.
+
+This outputter is different from the rest of the outputters in that the code it
+generates does not use the Glean SDK. It is meant to be used to collect events
+in server-side environments, where the SDK's assumptions about measurement
+windows and connectivity don't hold.
+Generated code takes care of assembling pings with metrics and serializing them
+to messages conforming to the Glean schema.
+
+Warning: this outputter supports a limited set of metrics;
+see `SUPPORTED_METRIC_TYPES` below.
+
+The generated code creates the following:
+* Two methods for logging each Event metric,
+  one with and one without user request info specified
+"""
+from collections import defaultdict
+from pathlib import Path
+from typing import Any, Dict, Optional, List
+
+from . import __version__
+from . import metrics
+from . import util
+
+# Adding a metric here will require updating the `generate_metric_type` function
+# and require adjustments to the `metrics` variables in the template.
+SUPPORTED_METRIC_TYPES = ["string", "quantity", "event"]
+
+
+def generate_event_type_name(metric: metrics.Metric) -> str:
+ return f"Event{util.Camelize(metric.category)}{util.Camelize(metric.name)}"
+
+
+def generate_metric_name(metric: metrics.Metric) -> str:
+ return f"{metric.category}.{metric.name}"
+
+
+def generate_extra_name(extra: str) -> str:
+ return util.Camelize(extra)
+
+
+def generate_metric_argument_name(metric: metrics.Metric) -> str:
+ return f"{util.Camelize(metric.category)}{util.Camelize(metric.name)}"
+
+
+def generate_metric_type(metric_type: str) -> str:
+ if metric_type == "quantity":
+ return "int64"
+ elif metric_type == "string":
+ return "string"
+ elif metric_type == "boolean":
+ return "bool"
+ else:
+ print("❌ Unable to generate Go type from metric type: " + metric_type)
+ exit(1)
+ return "NONE"
+
+
+def clean_string(s: str) -> str:
+ return s.replace("\n", " ").rstrip()
+
+
+def output_go(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]]
+) -> None:
+ """
+ Given a tree of objects, output Go code to `output_dir`.
+
+ The output is a single file containing all the code for assembling pings with
+ metrics, serializing, and submitting.
+
+ :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ """
+
+ template = util.get_jinja2_template(
+ "go_server.jinja2",
+ filters=(
+ ("event_type_name", generate_event_type_name),
+ ("event_extra_name", generate_extra_name),
+ ("metric_name", generate_metric_name),
+ ("metric_argument_name", generate_metric_argument_name),
+ ("go_metric_type", generate_metric_type),
+ ("clean_string", clean_string),
+ ),
+ )
+
+ PING_METRIC_ERROR_MSG = (
+ " Server-side environment is simplified and only supports the events ping type."
+ + " You should not be including pings.yaml with your parser call"
+ + " or referencing any other pings in your metric configuration."
+ )
+ if "pings" in objs:
+ print("❌ Ping definition found." + PING_METRIC_ERROR_MSG)
+ return
+
+ # Go through all metrics in objs and build a map of
+ # ping->list of metric categories->list of metrics
+ # for easier processing in the template.
+ ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict)
+ for _category_key, category_val in objs.items():
+ for _metric_name, metric in category_val.items():
+ if isinstance(metric, metrics.Metric):
+ if metric.type not in SUPPORTED_METRIC_TYPES:
+ print(
+ "❌ Ignoring unsupported metric type: "
+ + f"{metric.type}:{metric.name}."
+ + " Reach out to Glean team to add support for this"
+ + " metric type."
+ )
+ continue
+ for ping in metric.send_in_pings:
+ if ping != "events":
+ print(
+ "❌ Non-events ping reference found."
+ + PING_METRIC_ERROR_MSG
+ + f" Ignoring the {ping} ping type."
+ )
+ continue
+ metrics_by_type = ping_to_metrics[ping]
+ metrics_list = metrics_by_type.setdefault(metric.type, [])
+ metrics_list.append(metric)
+
+ if "event" not in ping_to_metrics["events"]:
+ print("❌ No event metrics found...at least one event metric is required")
+ return
+
+ extension = ".go"
+ filepath = output_dir / ("server_events" + extension)
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ parser_version=__version__, events_ping=ping_to_metrics["events"]
+ )
+ )
diff --git a/third_party/python/glean_parser/glean_parser/metrics.py b/third_party/python/glean_parser/glean_parser/metrics.py
index 5738239f97..accfbd763d 100644
--- a/third_party/python/glean_parser/glean_parser/metrics.py
+++ b/third_party/python/glean_parser/glean_parser/metrics.py
@@ -181,6 +181,7 @@ class Metric:
d.pop("unit")
d.pop("_config", None)
d.pop("_generate_enums", None)
+ d.pop("_generate_structure", None)
return d
def _serialize_input(self) -> Dict[str, util.JSONType]:
@@ -434,4 +435,63 @@ class Text(Metric):
typename = "text"
+class Object(Metric):
+ typename = "object"
+
+ def __init__(self, *args, **kwargs):
+ structure = kwargs.pop("structure", None)
+ if not structure:
+ raise ValueError("`object` is missing required parameter `structure`")
+
+ self._generate_structure = self.validate_structure(structure)
+ super().__init__(*args, **kwargs)
+
+ ALLOWED_TOPLEVEL = {"type", "properties", "items"}
+ ALLOWED_TYPES = ["object", "array", "number", "string", "boolean"]
+
+ @staticmethod
+ def _validate_substructure(structure):
+ extra = set(structure.keys()) - Object.ALLOWED_TOPLEVEL
+ if extra:
+ extra = ", ".join(extra)
+ allowed = ", ".join(Object.ALLOWED_TOPLEVEL)
+ raise ValueError(
+ f"Found additional fields: {extra}. Only allowed: {allowed}"
+ )
+
+ if "type" not in structure or structure["type"] not in Object.ALLOWED_TYPES:
+ raise ValueError("invalid or missing `type` in object structure")
+
+ if structure["type"] == "object":
+ if "items" in structure:
+ raise ValueError("`items` not allowed in object structure")
+
+ if "properties" not in structure:
+ raise ValueError("`properties` missing for type `object`")
+
+ for key in structure["properties"]:
+ value = structure["properties"][key]
+ structure["properties"][key] = Object._validate_substructure(value)
+
+ if structure["type"] == "array":
+ if "properties" in structure:
+ raise ValueError("`properties` not allowed in array structure")
+
+ if "items" not in structure:
+ raise ValueError("`items` missing for type `array`")
+
+ value = structure["items"]
+ structure["items"] = Object._validate_substructure(value)
+
+ return structure
+
+ @staticmethod
+ def validate_structure(structure):
+ if structure is None:
+ raise ValueError("`structure` needed for object metric.")
+
+ structure = Object._validate_substructure(structure)
+ return structure
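+
+ # A sketch of a `structure` value this validation accepts (not taken from any
+ # real metrics.yaml): an array of objects, each with a string and a boolean.
+ #
+ #     Object._validate_substructure({
+ #         "type": "array",
+ #         "items": {
+ #             "type": "object",
+ #             "properties": {
+ #                 "name": {"type": "string"},
+ #                 "enabled": {"type": "boolean"},
+ #             },
+ #         },
+ #     })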
+
+
ObjectTree = Dict[str, Dict[str, Union[Metric, pings.Ping, tags.Tag]]]
diff --git a/third_party/python/glean_parser/glean_parser/pings.py b/third_party/python/glean_parser/glean_parser/pings.py
index 3099fa1d16..b4145ea68d 100644
--- a/third_party/python/glean_parser/glean_parser/pings.py
+++ b/third_party/python/glean_parser/glean_parser/pings.py
@@ -45,6 +45,7 @@ class Ping:
metadata = {}
self.metadata = metadata
self.precise_timestamps = self.metadata.get("precise_timestamps", True)
+ self.include_info_sections = self.metadata.get("include_info_sections", True)
if data_reviews is None:
data_reviews = []
self.data_reviews = data_reviews
@@ -90,6 +91,9 @@ class Ping:
d = self.serialize()
modified_dict = util.remove_output_params(d, "defined_in")
modified_dict = util.remove_output_params(modified_dict, "precise_timestamps")
+ modified_dict = util.remove_output_params(
+ modified_dict, "include_info_sections"
+ )
return modified_dict
def identifier(self) -> str:
diff --git a/third_party/python/glean_parser/glean_parser/python_server.py b/third_party/python/glean_parser/glean_parser/python_server.py
new file mode 100644
index 0000000000..8ead0eb315
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/python_server.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate server Python code for collecting events.
+
+This outputter is different from the rest of the outputters in that the code it
+generates does not use the Glean SDK. It is meant to be used to collect events
+in server-side environments, where the SDK's assumptions about measurement
+windows and connectivity don't hold.
+Generated code takes care of assembling pings with metrics and serializing them
+to messages conforming to the Glean schema.
+
+Warning: this outputter supports a limited set of metrics;
+see `SUPPORTED_METRIC_TYPES` below.
+
+The generated code creates a `ServerEventLogger` class for each ping that has
+at least one event metric. The class has a `record` method for each event metric.
+"""
+from collections import defaultdict
+from pathlib import Path
+from typing import Any, Dict, Optional, List
+
+from . import __version__
+from . import metrics
+from . import util
+
+# Adding a metric here will require updating the `generate_metric_type` function
+# and require adjustments to the `metrics` variables in the template.
+SUPPORTED_METRIC_TYPES = ["string", "quantity", "event"]
+
+
+def camelize(s: str) -> str:
+ return util.Camelize(s)
+
+
+def generate_metric_type(metric_type: str) -> str:
+ if metric_type == "quantity":
+ return "int"
+ elif metric_type == "string":
+ return "str"
+ elif metric_type == "boolean":
+ return "bool"
+ else:
+ print("❌ Unable to generate Python type from metric type: " + metric_type)
+ exit(1)
+ return "NONE"
+
+
+def clean_string(s: str) -> str:
+ return s.replace("\n", " ").rstrip()
+
+
+def generate_ping_factory_method(ping: str) -> str:
+ return f"create_{util.snake_case(ping)}_server_event_logger"
+
+
+def generate_event_record_function_name(event_metric: metrics.Metric) -> str:
+ return (
+ f"record_{util.snake_case(event_metric.category)}_"
+ + f"{util.snake_case(event_metric.name)}"
+ )
+
+
+def output_python(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]]
+) -> None:
+ """
+ Given a tree of objects, output Python code to `output_dir`.
+
+ The output is a file containing all the code for assembling pings with
+ metrics, serializing, and submitting, and an empty `__init__.py` file to
+ make the directory a package.
+
+ :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ """
+
+ template = util.get_jinja2_template(
+ "python_server.jinja2",
+ filters=(
+ ("camelize", camelize),
+ ("py_metric_type", generate_metric_type),
+ ("clean_string", clean_string),
+ ("factory_method", generate_ping_factory_method),
+ ("record_event_function_name", generate_event_record_function_name),
+ ),
+ )
+
+ # Go through all metrics in objs and build a map of
+ # ping->list of metric categories->list of metrics
+ # for easier processing in the template.
+ ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict)
+ for _category_key, category_val in objs.items():
+ for _metric_name, metric in category_val.items():
+ if isinstance(metric, metrics.Metric):
+ if metric.type not in SUPPORTED_METRIC_TYPES:
+ print(
+ "❌ Ignoring unsupported metric type: "
+ + f"{metric.type}:{metric.name}."
+ + " Reach out to Glean team to add support for this"
+ + " metric type."
+ )
+ continue
+ for ping in metric.send_in_pings:
+ metrics_by_type = ping_to_metrics[ping]
+ metrics_list = metrics_by_type.setdefault(metric.type, [])
+ metrics_list.append(metric)
+
+ for ping, metrics_by_type in ping_to_metrics.items():
+ if "event" not in metrics_by_type:
+ print(
+ f"❌ No event metrics found for ping: {ping}."
+ + " At least one event metric is required."
+ )
+ return
+
+ extension = ".py"
+ filepath = output_dir / ("server_events" + extension)
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(template.render(parser_version=__version__, pings=ping_to_metrics))
+
+ # create an empty `__init__.py` file to make the directory a package
+ init_file = output_dir / "__init__.py"
+ with init_file.open("w", encoding="utf-8") as fd:
+ fd.write("")
diff --git a/third_party/python/glean_parser/glean_parser/rust.py b/third_party/python/glean_parser/glean_parser/rust.py
index eb3355e382..6dd4426d84 100644
--- a/third_party/python/glean_parser/glean_parser/rust.py
+++ b/third_party/python/glean_parser/glean_parser/rust.py
@@ -65,7 +65,7 @@ def rust_datatypes_filter(value):
elif isinstance(value, metrics.CowString):
yield f'::std::borrow::Cow::from("{value.inner}")'
elif isinstance(value, str):
- yield f'"{value}".into()'
+ yield f"{json.dumps(value)}.into()"
elif isinstance(value, metrics.Rate):
yield "CommonMetricData("
first = True
@@ -115,6 +115,11 @@ def type_name(obj):
return "{}<{}>".format(class_name(obj.type), generic)
+ generate_structure = getattr(obj, "_generate_structure", [])
+ if len(generate_structure):
+ generic = util.Camelize(obj.name) + "Object"
+ return "{}<{}>".format(class_name(obj.type), generic)
+
return class_name(obj.type)
@@ -133,6 +138,21 @@ def extra_type_name(typ: str) -> str:
return "UNSUPPORTED"
+def structure_type_name(typ: str) -> str:
+ """
+ Returns the corresponding Rust type for structure items.
+ """
+
+ if typ == "boolean":
+ return "bool"
+ elif typ == "string":
+ return "String"
+ elif typ == "number":
+ return "i64"
+ else:
+ return "UNSUPPORTED"
+
+
def class_name(obj_type):
"""
Returns the Rust class name for a given metric or ping type.
@@ -190,6 +210,7 @@ def output_rust(
("camelize", util.camelize),
("type_name", type_name),
("extra_type_name", extra_type_name),
+ ("structure_type_name", structure_type_name),
("ctor", ctor),
("extra_keys", extra_keys),
),
diff --git a/third_party/python/glean_parser/glean_parser/schemas/metrics.2-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/metrics.2-0-0.schema.yaml
index 0bc8d500c6..0e785c5303 100644
--- a/third_party/python/glean_parser/glean_parser/schemas/metrics.2-0-0.schema.yaml
+++ b/third_party/python/glean_parser/glean_parser/schemas/metrics.2-0-0.schema.yaml
@@ -119,6 +119,9 @@ definitions:
- `text`: Record long text data.
+ - `object`: Record structured data based on a pre-defined schema
+ Additional properties: `structure`.
+
type: string
enum:
- event
@@ -140,6 +143,7 @@ definitions:
- labeled_counter
- rate
- text
+ - object
description:
title: Description
@@ -567,6 +571,15 @@ definitions:
so glean_parser can find it.
type: string
+ structure:
+ title: A subset of a JSON schema definition
+ description: |
+ The expected structure of data, defined in a strict subset of
+ YAML-dialect JSON Schema (Draft 7) supporting keys "type"
+ (only values "object", "array", "number", "string", and "boolean"),
+ "properties", and "items".
+ type: object
+
required:
- type
- bugs
diff --git a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
index 2f25405d45..6679a8066b 100644
--- a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
+++ b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
@@ -84,6 +84,18 @@ additionalProperties:
When `false` Glean uses minute-precise timestamps for
the ping's start/end time.
type: boolean
+ include_info_sections:
+ title: Include Info Sections
+ description: |
+ When `true`, assemble and include the `client_info` and `ping_info`
+ sections in the ping on submission.
+ When `false`, omit the `client_info` and `ping_info` sections when
+ assembling the ping on submission.
+ Defaults to `true`.
+
+ Interaction with `include_client_id`: `include_client_id` only takes
+ effect when `metadata.include_info_sections` is `true`.
+ type: boolean
default: {}
@@ -93,6 +105,9 @@ additionalProperties:
**Required.**
When `true`, include the `client_id` value in the ping.
+
+ Interaction with `metadata.include_info_sections`: `include_client_id`
+ only takes effect when `metadata.include_info_sections` is `true`.
type: boolean
send_if_empty:
diff --git a/third_party/python/glean_parser/glean_parser/templates/go_server.jinja2 b/third_party/python/glean_parser/glean_parser/templates/go_server.jinja2
new file mode 100644
index 0000000000..0a26831b0f
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/go_server.jinja2
@@ -0,0 +1,225 @@
+{# The final Go code is autogenerated, but this template is not. Please file bugs! #}
+package glean
+
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT.
+
+// required imports
+import (
+ "encoding/json"
+ "fmt"
+ "strconv"
+ "time"
+
+ "github.com/google/uuid"
+)
+
+// log type string used to identify logs to process in the Moz Data Pipeline
+var gleanEventMozlogType string = "glean-server-event"
+
+type GleanEventsLogger struct {
+ AppID string // Application Id to identify application per Glean standards
+ AppDisplayVersion string // Version of application emitting the event
+ AppChannel string // Channel to differentiate logs from prod/beta/staging/devel
+}
+
+// exported type for public method parameters
+type RequestInfo struct {
+ UserAgent string
+ IpAddress string
+}
+
+// default empty values will be omitted in json from ping struct definition
+var defaultRequestInfo = RequestInfo{
+ UserAgent: "",
+ IpAddress: "",
+}
+
+// structs to construct the glean ping
+type clientInfo struct {
+ TelemetrySDKBuild string `json:"telemetry_sdk_build"`
+ FirstRunDate string `json:"first_run_date"`
+ OS string `json:"os"`
+ OSVersion string `json:"os_version"`
+ Architecture string `json:"architecture"`
+ AppBuild string `json:"app_build"`
+ AppDisplayVersion string `json:"app_display_version"`
+ AppChannel string `json:"app_channel"`
+}
+
+type pingInfo struct {
+ Seq int `json:"seq"`
+ StartTime string `json:"start_time"`
+ EndTime string `json:"end_time"`
+}
+
+type ping struct {
+ DocumentNamespace string `json:"document_namespace"`
+ DocumentType string `json:"document_type"`
+ DocumentVersion string `json:"document_version"`
+ DocumentID string `json:"document_id"`
+ UserAgent string `json:"user_agent,omitempty"`
+ IpAddress string `json:"ip_address,omitempty"`
+ Payload string `json:"payload"`
+}
+
+type metrics map[string]map[string]interface{}
+
+type pingPayload struct {
+ ClientInfo clientInfo `json:"client_info"`
+ PingInfo pingInfo `json:"ping_info"`
+ Metrics metrics `json:"metrics"`
+ Events []gleanEvent `json:"events"`
+}
+
+type gleanEvent struct {
+ Category string `json:"category"`
+ Name string `json:"name"`
+ Timestamp int64 `json:"timestamp"`
+ Extra map[string]string `json:"extra"`
+}
+
+type logEnvelope struct {
+ Timestamp string
+ Logger string
+ Type string
+ Fields ping
+}
+
+func (g GleanEventsLogger) createClientInfo() clientInfo {
+ // Fields with default values are required in the Glean schema, but not used in server context
+ return clientInfo{
+ TelemetrySDKBuild: "glean_parser v{{ parser_version }}",
+ FirstRunDate: "Unknown",
+ OS: "Unknown",
+ OSVersion: "Unknown",
+ Architecture: "Unknown",
+ AppBuild: "Unknown",
+ AppDisplayVersion: g.AppDisplayVersion,
+ AppChannel: g.AppChannel,
+ }
+}
+
+func createPingInfo() pingInfo {
+ {# times are ISO-8601 strings, e.g. "2023-12-19T22:09:17.440Z" #}
+ var now = time.Now().UTC().Format("2006-01-02T15:04:05.000Z")
+ return pingInfo{
+ Seq: 0,
+ StartTime: now,
+ EndTime: now,
+ }
+}
+
+func (g GleanEventsLogger) createPing(documentType string, config RequestInfo, payload pingPayload) ping {
+ var payloadJson, payloadErr = json.Marshal(payload)
+ if payloadErr != nil {
+ panic("Unable to marshal payload to json")
+ }
+ var documentId = uuid.New()
+ return ping{
+ DocumentNamespace: g.AppID,
+ DocumentType: documentType,
+ DocumentVersion: "1",
+ DocumentID: documentId.String(),
+ UserAgent: config.UserAgent,
+ IpAddress: config.IpAddress,
+ Payload: string(payloadJson),
+ }
+}
+
+// method called by each event method.
+// construct the ping, wrap it in the envelope, and print to stdout
+func (g GleanEventsLogger) record(
+ documentType string,
+ requestInfo RequestInfo,
+ metrics metrics,
+ events []gleanEvent,
+) {
+ var telemetryPayload = pingPayload{
+ ClientInfo: g.createClientInfo(),
+ PingInfo: createPingInfo(),
+ Metrics: metrics,
+ Events: events,
+ }
+
+ var ping = g.createPing(documentType, requestInfo, telemetryPayload)
+
+ var envelope = logEnvelope{
+ Timestamp: strconv.FormatInt(time.Now().UnixNano(), 10),
+ Logger: "glean",
+ Type: gleanEventMozlogType,
+ Fields: ping,
+ }
+ var envelopeJson, envelopeErr = json.Marshal(envelope)
+ if envelopeErr != nil {
+ panic("Unable to marshal log envelope to json")
+ }
+ fmt.Println(string(envelopeJson))
+}
+
+{% for event in events_ping["event"] %}
+type {{ event|event_type_name }} struct {
+ {% for metric_type, metrics in events_ping.items() %}
+ {% if metric_type != 'event' %}
+ {% for metric in metrics %}
+ {{ metric|metric_argument_name }} {{ metric.type|go_metric_type }} // {{ metric.description|clean_string }}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ {% for extra, metadata in event.extra_keys.items() %}
+ {{ extra|event_extra_name }} {{ metadata.type|go_metric_type }} // {{ metadata.description|clean_string }}
+ {% endfor %}
+}
+
+// Record and submit an {{ event|event_type_name }} event.
+// {{ event.description|clean_string }}
+func (g GleanEventsLogger) Record{{ event|event_type_name }}(
+ requestInfo RequestInfo,
+ params {{ event|event_type_name }},
+) {
+ var metrics = metrics{
+ {% for metric_type, metrics in events_ping.items() %}
+ {% if metric_type != 'event' %}
+ "{{ metric_type }}": {
+ {% for metric in metrics %}
+ "{{ metric|metric_name }}": params.{{ metric|metric_argument_name }},
+ {% endfor %}
+ },
+ {% endif %}
+ {% endfor %}
+ }
+ var extraKeys = map[string]string{
+ {% for extra, metadata in event.extra_keys.items() %}
+ {# convert all extra fields to string for submission #}
+ {% if metadata.type == 'boolean' %}
+ "{{ extra }}": fmt.Sprintf("%t", params.{{ extra|event_extra_name }}),
+ {% elif metadata.type == 'quantity' %}
+ "{{ extra }}": fmt.Sprintf("%d", params.{{ extra|event_extra_name }}),
+ {% else %}
+ "{{ extra }}": params.{{ extra|event_extra_name }},
+ {% endif %}
+ {% endfor %}
+ }
+ var events = []gleanEvent{
+ gleanEvent{
+ Category: "{{ event.category }}",
+ Name: "{{ event.name }}",
+ Timestamp: time.Now().UnixMilli(),
+ Extra: extraKeys,
+ },
+ }
+ g.record("events", requestInfo, metrics, events)
+}
+
+// Record and submit an {{ event|event_type_name }} event omitting user request info
+// {{ event.description|clean_string }}
+func (g GleanEventsLogger) Record{{ event|event_type_name }}WithoutUserInfo(
+ params {{ event|event_type_name }},
+) {
+ g.Record{{ event|event_type_name }}(defaultRequestInfo, params)
+}
+
+{% endfor %}
diff --git a/third_party/python/glean_parser/glean_parser/templates/python_server.jinja2 b/third_party/python/glean_parser/glean_parser/templates/python_server.jinja2
new file mode 100644
index 0000000000..689fab2109
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/python_server.jinja2
@@ -0,0 +1,194 @@
+{# The final Python code is autogenerated, but this template is not. Please file bugs! #}
+"""
+This Source Code Form is subject to the terms of the Mozilla Public
+License, v. 2.0. If a copy of the MPL was not distributed with this
+file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT.
+"""
+
+from __future__ import annotations
+from datetime import datetime, timezone
+from typing import Any
+from uuid import uuid4
+import json
+
+GLEAN_EVENT_MOZLOG_TYPE = "glean-server-event"
+
+
+{% for ping, metrics_by_type in pings.items() %}
+class {{ ping|camelize }}ServerEventLogger:
+ def __init__(
+ self, application_id: str, app_display_version: str, channel: str
+ ) -> None:
+ """
+ Create {{ ping|camelize }}ServerEventLogger instance.
+
+ :param str application_id: The application ID.
+ :param str app_display_version: The application display version.
+ :param str channel: The channel.
+ """
+ self._application_id = application_id
+ self._app_display_version = app_display_version
+ self._channel = channel
+
+ def _record(
+ self,
+ user_agent: str,
+ ip_address: str,
+ {% for metric_type, metrics in metrics_by_type.items() %}
+ {% if metric_type != 'event' %}
+ {% for metric in metrics %}
+ {{ metric.category }}_{{ metric.name }}: {{ metric.type|py_metric_type }},
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ event: dict[str, Any]
+ ) -> None:
+ now = datetime.now(timezone.utc)
+ timestamp = now.isoformat()
+ event["timestamp"] = int(1000.0 * now.timestamp()) # Milliseconds since epoch
+ event_payload = {
+ "metrics": {
+ {% for metric_type, metrics in metrics_by_type.items() %}
+ {% if metric_type != 'event' %}
+ "{{ metric_type }}": {
+ {% for metric in metrics %}
+ "{{ metric.category }}.{{ metric.name }}": {{ metric.category }}_{{ metric.name }},
+ {% endfor %}
+ },
+ {% endif %}
+ {% endfor %}
+ },
+ "events": [event],
+ "ping_info": {
+ # seq is required in the Glean schema, however it is not useful in server context
+ "seq": 0,
+ "start_time": timestamp,
+ "end_time": timestamp,
+ },
+ # `Unknown` fields below are required in the Glean schema, however they are
+ # not useful in server context
+ "client_info": {
+ "telemetry_sdk_build": "glean_parser v{{ parser_version }}",
+ "first_run_date": "Unknown",
+ "os": "Unknown",
+ "os_version": "Unknown",
+ "architecture": "Unknown",
+ "app_build": "Unknown",
+ "app_display_version": self._app_display_version,
+ "app_channel": self._channel,
+ },
+ }
+ event_payload_serialized = json.dumps(event_payload)
+
+ # This is the message structure that Decoder expects:
+ # https://github.com/mozilla/gcp-ingestion/pull/2400
+ ping = {
+ "document_namespace": self._application_id,
+ "document_type": "{{ ping }}",
+ "document_version": "1",
+ "document_id": str(uuid4()),
+ "user_agent": user_agent,
+ "ip_address": ip_address,
+ "payload": event_payload_serialized,
+ }
+
+
+ self.emit_record(now, ping)
+
+ def emit_record(self, now: datetime, ping: dict[str, Any]) -> None:
+ """Log the ping to STDOUT.
+ Applications might want to override this method to use their own logging.
+ If doing so, make sure to log the ping as JSON, and to include the
+ `Type: GLEAN_EVENT_MOZLOG_TYPE`."""
+ ping_envelope = {
+ "Timestamp": now.isoformat(),
+ "Logger": "glean",
+ "Type": GLEAN_EVENT_MOZLOG_TYPE,
+ "Fields": ping,
+ }
+ ping_envelope_serialized = json.dumps(ping_envelope)
+
+ print(ping_envelope_serialized)
+
+ {% for event in metrics_by_type["event"] %}
+ def {{ event|record_event_function_name }}(
+ self,
+ user_agent: str,
+ ip_address: str,
+ {% for metric_type, metrics in metrics_by_type.items() %}
+ {% if metric_type != 'event' %}
+ {% for metric in metrics %}
+ {{ metric.category }}_{{ metric.name }}: {{ metric.type|py_metric_type }},
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ {% for extra, metadata in event.extra_keys.items() %}
+ {{ extra }}: {{ metadata.type|py_metric_type }},
+ {% endfor %}
+ ) -> None:
+ """
+ Record and submit a {{ event.category }}_{{ event.name }} event:
+ {{ event.description|clean_string }}
+ Event is logged to STDOUT via `print`.
+
+ :param str user_agent: The user agent.
+ :param str ip_address: The IP address. Will be used to decode Geo information
+ and scrubbed at ingestion.
+ {% for metric_type, metrics in metrics_by_type.items() %}
+ {% if metric_type != 'event' %}
+ {% for metric in metrics %}
+ :param {{ metric.type|py_metric_type }} {{ metric.category }}_{{ metric.name }}: {{ metric.description|clean_string }}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ {% if event.extra_keys %}
+ {% for extra, metadata in event.extra_keys.items() %}
+ :param {{ metadata.type|py_metric_type }} {{ extra }}: {{ metadata.description|clean_string }}
+ {% endfor %}
+ {% endif %}
+ """
+ event = {
+ "category": "{{ event.category }}",
+ "name": "{{ event.name }}",
+ {% if event.extra_keys %}
+ "extra": {
+ {% for extra, metadata in event.extra_keys.items() %}
+ "{{ extra }}": str({{ extra }}){% if 'bool' == metadata.type|py_metric_type %}.lower(){% endif %},
+ {% endfor %}
+ },
+ {% endif %}
+ }
+ self._record(
+ user_agent,
+ ip_address,
+ {% for metric_type, metrics in metrics_by_type.items() %}
+ {% if metric_type != 'event' %}
+ {% for metric in metrics %}
+ {{ metric.category }}_{{ metric.name }},
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ event
+ )
+ {% endfor %}
+{% endfor %}
+
+{% for ping in pings %}
+def {{ ping|factory_method }}(
+ application_id: str,
+ app_display_version: str,
+ channel: str,
+) -> {{ ping|camelize }}ServerEventLogger:
+ """
+ Factory function that creates an instance of Glean Server Event Logger to record
+ `{{ ping }}` ping events.
+ :param str application_id: The application ID.
+ :param str app_display_version: The application display version.
+ :param str channel: The channel.
+ :return: An instance of {{ ping|camelize }}ServerEventLogger.
+ :rtype: {{ ping|camelize }}ServerEventLogger
+ """
+ return {{ ping|camelize }}ServerEventLogger(application_id, app_display_version, channel)
+{% endfor %}
diff --git a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
index 51e458cddf..4c54dd2b2c 100644
--- a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
@@ -8,6 +8,49 @@ Jinja2 template is not. Please file bugs! #}
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+{%- macro generate_structure(name, struct) %}
+{% if struct.type == "array" %}
+ pub type {{ name }} = Vec<{{ name }}Item>;
+
+ {{ generate_structure(name ~ "Item", struct["items"]) }}
+
+{% elif struct.type == "object" %}
+ #[derive(Debug, Hash, Eq, PartialEq, ::glean::traits::__serde::Serialize, ::glean::traits::__serde::Deserialize)]
+ #[serde(crate = "::glean::traits::__serde")]
+ #[serde(deny_unknown_fields)]
+ pub struct {{ name }} {
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "object" %}
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub {{itemname|snake_case}}: Option<{{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}>,
+ {% elif val.type == "array" %}
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub {{itemname|snake_case}}: {{ name ~ "Item" ~ itemname|Camelize }},
+ {% else %}
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub {{itemname|snake_case}}: Option<{{val.type|structure_type_name}}>,
+ {% endif %}
+ {% endfor %}
+ }
+
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "array" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize %}
+ {{ generate_structure(nested_name, val) }}
+ {% elif val.type == "object" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %}
+ {{ generate_structure(nested_name, val) }}
+ {% endif %}
+ {% endfor %}
+
+{% else %}
+
+pub type {{ name }} = {{ struct.type|structure_type_name }};
+
+{% endif %}
+
+{% endmacro %}
+
{% macro generate_extra_keys(obj) %}
{% for name, _ in obj["_generate_enums"] %}
{# we always use the `extra` suffix, because we only expose the new event API #}
@@ -44,7 +87,7 @@ impl ExtraKeys for {{ obj.name|Camelize }}{{ suffix }} {
/// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }}
#[rustfmt::skip]
pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> =
- ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.reason_codes|rust }}));
+ ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.reason_codes|rust }}));
{% endfor %}
{% else %}
pub mod {{ category.name|snake_case }} {
@@ -52,6 +95,10 @@ pub mod {{ category.name|snake_case }} {
use glean::{private::*, traits::ExtraKeys, traits::NoExtraKeys, CommonMetricData, HistogramType, Lifetime, TimeUnit, MemoryUnit};
{% for obj in category.objs.values() %}
+ {% if obj|attr("_generate_structure") %}
+{{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }}
+ {%- endif %}
+
{% if obj|attr("_generate_enums") %}
{{ generate_extra_keys(obj) }}
{%- endif %}
diff --git a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
index 82ad37bf20..714bf20ec2 100644
--- a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
@@ -96,6 +96,7 @@ extension {{ namespace }} {
includeClientId: {{obj.include_client_id|swift}},
sendIfEmpty: {{obj.send_if_empty|swift}},
preciseTimestamps: {{obj.precise_timestamps|swift}},
+ includeInfoSections: {{obj.include_info_sections|swift}},
reasonCodes: {{obj.reason_codes|swift}}
)
diff --git a/third_party/python/glean_parser/glean_parser/translate.py b/third_party/python/glean_parser/glean_parser/translate.py
index 021fce47fb..6293a99491 100644
--- a/third_party/python/glean_parser/glean_parser/translate.py
+++ b/third_party/python/glean_parser/glean_parser/translate.py
@@ -17,8 +17,10 @@ from typing import Any, Callable, Dict, Iterable, List, Optional
from . import lint
from . import parser
+from . import go_server
from . import javascript
from . import javascript_server
+from . import python_server
from . import kotlin
from . import markdown
from . import metrics
@@ -54,10 +56,12 @@ class Outputter:
OUTPUTTERS = {
+ "go_server": Outputter(go_server.output_go, []),
"javascript": Outputter(javascript.output_javascript, []),
"typescript": Outputter(javascript.output_typescript, []),
"javascript_server": Outputter(javascript_server.output_javascript, []),
"typescript_server": Outputter(javascript_server.output_typescript, []),
+ "python_server": Outputter(python_server.output_python, []),
"ruby_server": Outputter(ruby_server.output_ruby, []),
"kotlin": Outputter(kotlin.output_kotlin, ["*.kt"]),
"markdown": Outputter(markdown.output_markdown, []),
diff --git a/third_party/python/glean_parser/glean_parser/util.py b/third_party/python/glean_parser/glean_parser/util.py
index edaeed9578..41cda8833d 100644
--- a/third_party/python/glean_parser/glean_parser/util.py
+++ b/third_party/python/glean_parser/glean_parser/util.py
@@ -525,6 +525,7 @@ ping_args = [
"include_client_id",
"send_if_empty",
"precise_timestamps",
+ "include_info_sections",
"reason_codes",
]
diff --git a/third_party/python/poetry.lock b/third_party/python/poetry.lock
index 108b754b8b..67d13cdfc3 100644
--- a/third_party/python/poetry.lock
+++ b/third_party/python/poetry.lock
@@ -592,14 +592,14 @@ files = [
[[package]]
name = "glean-parser"
-version = "11.0.1"
+version = "13.0.0"
description = "Parser tools for Mozilla's Glean telemetry"
category = "main"
optional = false
python-versions = "*"
files = [
- {file = "glean_parser-11.0.1-py3-none-any.whl", hash = "sha256:f6991ba1438909d1fb6f96d3efaaef2a92098eceff39d5d998d3bbb170276c6a"},
- {file = "glean_parser-11.0.1.tar.gz", hash = "sha256:79e523b51b0fddce9f6c6309fa8c64328bb706b2d6c847a53f88a93d01695e43"},
+ {file = "glean_parser-13.0.0-py3-none-any.whl", hash = "sha256:1c1e9d33fae3b804fc066ae6b2ae7ae8f4148cac1e5b248f2c1e2bfc2e3ae520"},
+ {file = "glean_parser-13.0.0.tar.gz", hash = "sha256:833780cab7e057034b352786203af94f21afcb0094cbed6010471f5dc21a5f91"},
]
[package.dependencies]
@@ -771,6 +771,25 @@ files = [
]
[[package]]
+name = "mako"
+version = "1.1.2"
+description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "Mako-1.1.2-py2.py3-none-any.whl", hash = "sha256:8e8b53c71c7e59f3de716b6832c4e401d903af574f6962edbbbf6ecc2a5fe6c9"},
+ {file = "Mako-1.1.2.tar.gz", hash = "sha256:3139c5d64aa5d175dbafb95027057128b5fbd05a40c53999f3905ceb53366d9d"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=0.9.2"
+
+[package.extras]
+babel = ["Babel"]
+lingua = ["lingua"]
+
+[[package]]
name = "markupsafe"
version = "2.0.1"
description = "Safely add untrusted strings to HTML/XML markup."
@@ -1142,7 +1161,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -1606,4 +1624,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "22aaa5192c95ba8c8effd4a6e39b32d356a790e72e7bae067615bbfe0c4e9896"
+content-hash = "2bb8ac6bdb09e709fe469807ee6ed832f281cbc78dee9edf5d932bee0fde5d4f"
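Note: each [[package]] entry in poetry.lock pins sha256 digests for the exact wheel/sdist artifacts, and the content-hash at the end ties the lock file to the project's dependency specification; the --hash lines in requirements.txt below serve the same purpose for pip's hash-checking mode. A minimal sketch of the kind of integrity check these digests enable (illustrative only, not poetry's or pip's actual implementation):

    import hashlib

    def verify_artifact(path: str, expected_sha256: str) -> bool:
        """Compare a downloaded wheel/sdist against its pinned sha256 digest."""
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                h.update(chunk)
        return h.hexdigest() == expected_sha256

    # e.g. using the Mako sdist digest pinned above:
    # verify_artifact("Mako-1.1.2.tar.gz",
    #                 "3139c5d64aa5d175dbafb95027057128b5fbd05a40c53999f3905ceb53366d9d")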
diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
index fc76e925ba..90a4049dcf 100644
--- a/third_party/python/requirements.in
+++ b/third_party/python/requirements.in
@@ -22,7 +22,7 @@ fluent.migrate==0.13.0
fluent.syntax==0.19.0
# Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version.
frozenlist==1.1.1
-glean_parser==11.0.1
+glean_parser==13.0.0
importlib-metadata==6.0.0
# required for compatibility with Flask >= 2 in tools/tryselect/selectors/chooser
jinja2==3.1.2
@@ -30,6 +30,7 @@ jsmin==3.0.0
json-e==4.5.3
jsonschema==4.17.3
looseversion==1.0.1
+mako==1.1.2
mozilla-repo-urls==0.1.1
mozilla-version==2.0.0
packaging==23.1
diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
index 792d2898f2..e7df6a5d5a 100644
--- a/third_party/python/requirements.txt
+++ b/third_party/python/requirements.txt
@@ -275,9 +275,9 @@ frozenlist==1.1.1 ; python_version >= "3.8" and python_version < "4.0" \
giturlparse==0.10.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \
--hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337
-glean-parser==11.0.1 ; python_version >= "3.8" and python_version < "4.0" \
- --hash=sha256:79e523b51b0fddce9f6c6309fa8c64328bb706b2d6c847a53f88a93d01695e43 \
- --hash=sha256:f6991ba1438909d1fb6f96d3efaaef2a92098eceff39d5d998d3bbb170276c6a
+glean-parser==13.0.0 ; python_version >= "3.8" and python_version < "4.0" \
+ --hash=sha256:1c1e9d33fae3b804fc066ae6b2ae7ae8f4148cac1e5b248f2c1e2bfc2e3ae520 \
+ --hash=sha256:833780cab7e057034b352786203af94f21afcb0094cbed6010471f5dc21a5f91
idna==2.10 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0
@@ -307,6 +307,9 @@ jsonschema==4.17.3 ; python_version >= "3.8" and python_version < "4.0" \
looseversion==1.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:a205beabd0ffd40488edb9ccb3a39134510fc7c0c2847a25079f559e59c004ac \
--hash=sha256:b339dfde67680e9c5c2e96673e52bee9f94d2f0e1b8f4cbfd86d32311e86b952
+mako==1.1.2 ; python_version >= "3.8" and python_version < "4.0" \
+ --hash=sha256:3139c5d64aa5d175dbafb95027057128b5fbd05a40c53999f3905ceb53366d9d \
+ --hash=sha256:8e8b53c71c7e59f3de716b6832c4e401d903af574f6962edbbbf6ecc2a5fe6c9
markupsafe==2.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \
--hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \
@@ -489,7 +492,6 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
--hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
--hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
- --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
--hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
--hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
--hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \